repo stringlengths 6 47 | file_url stringlengths 77 269 | file_path stringlengths 5 186 | content stringlengths 0 32.8k | language stringclasses 1
value | license stringclasses 7
values | commit_sha stringlengths 40 40 | retrieved_at stringdate 2026-01-07 08:35:43 2026-01-07 08:55:24 | truncated bool 2
classes |
|---|---|---|---|---|---|---|---|---|
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/permalinks_test.go | resources/page/permalinks_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
"regexp"
"strings"
"sync"
"testing"
"time"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/source"
)
// testdataPermalinks is used by a couple of tests; the expandsTo content is
// subject to the data in simplePageJSON.
// testdataPermalinks is used by a couple of tests; the expandsTo content is
// subject to the data in simplePageJSON.
var testdataPermalinks = []struct {
	spec      string            // permalink pattern to expand
	valid     bool              // whether the pattern is expected to be valid
	withPage  func(p *testPage) // optional mutation of the test page before expansion
	expandsTo string            // expected expansion result
}{
	{":title", true, nil, "spf13-vim-3.0-release-and-new-website"},
	{"/:year-:month-:title", true, nil, "/2012-04-spf13-vim-3.0-release-and-new-website"},
	{"/:year/:yearday/:month/:monthname/:day/:weekday/:weekdayname/", true, nil, "/2012/97/04/April/06/5/Friday/"}, // Dates
	{"/:section/", true, nil, "/blue/"},                                  // Section
	{"/:title/", true, nil, "/spf13-vim-3.0-release-and-new-website/"},   // Title
	{"/:slug/", true, nil, "/the-slug/"},                                 // Slug
	{"/:slugorfilename/", true, nil, "/the-slug/"},                       // Slug or filename
	{"/:filename/", true, nil, "/test-page/"},                            // Filename
	{"/:06-:1-:2-:Monday", true, nil, "/12-4-6-Friday"},                  // Dates with Go formatting
	{"/:2006_01_02_15_04_05.000", true, nil, "/2012_04_06_03_01_59.000"}, // Complicated custom date format
	{"/:sections/", true, nil, "/a/b/c/"},                                // Sections
	{"/:sections[last]/", true, nil, "/c/"},                              // Sections
	{"/:sections[0]/:sections[last]/", true, nil, "/a/c/"},               // Sections
	{"/\\:filename", true, nil, "/:filename"},                            // Escape sequence
	{"/special\\::slug/", true, nil, "/special:the-slug/"},               // Escape sequence
	// Content base name.
	{"/:contentbasename/", true, nil, "/test-page/"},
	// Slug or content base name.
	{"/:slugorcontentbasename/", true, func(p *testPage) {
		p.slug = ""
	}, "/test-page/"},
	{"/:slugorcontentbasename/", true, func(p *testPage) {
		p.slug = "myslug"
	}, "/myslug/"},
	{"/:slugorcontentbasename/", true, func(p *testPage) {
		p.slug = ""
		p.title = "mytitle"
		p.file = source.NewContentFileInfoFrom("/", "_index.md")
	}, "/test-page/"},
	// Section slug.
	{"/:sectionslug/", true, func(p *testPage) {
		p.currentSection = &testPage{slug: "my-slug"}
	}, "/my-slug/"},
	// Section slugs.
	{"/:sectionslugs/", true, func(p *testPage) {
		// Set up current section with ancestors.
		currentSection := &testPage{
			slug: "c-slug",
			kind: "section",
			ancestors: Pages{
				&testPage{slug: "b-slug", kind: "section"},
				&testPage{slug: "a-slug", kind: "section"},
			},
		}
		p.currentSection = currentSection
	}, "/a-slug/b-slug/c-slug/"},
	// Section slugs with slice syntax.
	{"/:sectionslugs[0]/:sectionslugs[last]/", true, func(p *testPage) {
		currentSection := &testPage{
			slug: "c-slug",
			kind: "section",
			ancestors: Pages{
				&testPage{slug: "b-slug", kind: "section"},
				&testPage{slug: "a-slug", kind: "section"},
			},
		}
		p.currentSection = currentSection
	}, "/a-slug/c-slug/"},
	{"/:sectionslugs[last]/", true, func(p *testPage) {
		currentSection := &testPage{
			slug: "c-slug",
			kind: "section",
			ancestors: Pages{
				&testPage{slug: "b-slug", kind: "section"},
				&testPage{slug: "a-slug", kind: "section"},
			},
		}
		p.currentSection = currentSection
	}, "/c-slug/"},
	// Failures
	{"/blog/:fred", false, nil, ""},
	{"/:year//:title", false, nil, ""},
	{"/:TITLE", false, nil, ""},      // case is not normalized
	{"/:2017", false, nil, ""},       // invalid date format
	{"/:2006-01-02", false, nil, ""}, // valid date format but invalid attribute name
}
// urlize is a rough stand-in for Hugo's real urlize function: it
// lowercases the input and turns spaces into hyphens.
func urlize(uri string) string {
	lowered := strings.ToLower(uri)
	return strings.ReplaceAll(lowered, " ", "-")
}
// TestPermalinkExpansion verifies that every valid pattern in
// testdataPermalinks expands to its expected path, both via a configured
// Expand and via ExpandPattern.
func TestPermalinkExpansion(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	// newPage returns a fresh test page with known title, date, section,
	// slug and kind; individual cases may mutate it via withPage.
	newPage := func() *testPage {
		page := newTestPageWithFile("/test-page/index.md")
		page.title = "Spf13 Vim 3.0 Release and new website"
		d, _ := time.Parse("2006-01-02 15:04:05", "2012-04-06 03:01:59")
		page.date = d
		page.section = "blue"
		page.slug = "The Slug"
		page.kind = "page"
		return page
	}

	// Compile once; this was previously recompiled on every loop iteration.
	specNameCleaner := regexp.MustCompile(`[\:\/\[\]]`)

	for i, item := range testdataPermalinks {
		if !item.valid {
			continue
		}

		page := newPage()
		if item.withPage != nil {
			item.withPage(page)
		}

		name := fmt.Sprintf("[%d] %s", i, specNameCleaner.ReplaceAllString(item.spec, "_"))
		c.Run(name, func(c *qt.C) {
			patterns := map[string]map[string]string{
				"page": {
					"posts": item.spec,
				},
			}
			expander, err := NewPermalinkExpander(urlize, patterns)
			c.Assert(err, qt.IsNil)

			expanded, err := expander.Expand("posts", page)
			c.Assert(err, qt.IsNil)
			c.Assert(expanded, qt.Equals, item.expandsTo)

			expanded, err = expander.ExpandPattern(item.spec, page)
			c.Assert(err, qt.IsNil)
			c.Assert(expanded, qt.Equals, item.expandsTo)
		})
	}
}
// TestPermalinkExpansionMultiSection verifies per-section permalink
// patterns, including the slug/filename fallbacks and escape sequences.
func TestPermalinkExpansionMultiSection(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	page := newTestPage()
	page.title = "Page Title"
	d, _ := time.Parse("2006-01-02", "2012-04-06")
	page.date = d
	page.section = "blue"
	page.slug = "The Slug"
	page.kind = "page"

	// A page without a slug, used to exercise the title/filename fallbacks.
	// (Renamed from page_slug_fallback: Go uses MixedCaps, not underscores.)
	pageSlugFallback := newTestPageWithFile("/page-filename/index.md")
	pageSlugFallback.title = "Page Title"
	pageSlugFallback.kind = "page"

	permalinksConfig := map[string]map[string]string{
		"page": {
			"posts":   "/:slug",
			"blog":    "/:section/:year",
			"recipes": "/:slugorfilename",
			"special": "/special\\::slug",
		},
	}

	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
	c.Assert(err, qt.IsNil)

	expanded, err := expander.Expand("posts", page)
	c.Assert(err, qt.IsNil)
	c.Assert(expanded, qt.Equals, "/the-slug")

	expanded, err = expander.Expand("blog", page)
	c.Assert(err, qt.IsNil)
	c.Assert(expanded, qt.Equals, "/blue/2012")

	// :slug falls back to the title when no slug is set.
	expanded, err = expander.Expand("posts", pageSlugFallback)
	c.Assert(err, qt.IsNil)
	c.Assert(expanded, qt.Equals, "/page-title")

	// :slugorfilename falls back to the filename instead.
	expanded, err = expander.Expand("recipes", pageSlugFallback)
	c.Assert(err, qt.IsNil)
	c.Assert(expanded, qt.Equals, "/page-filename")

	expanded, err = expander.Expand("special", page)
	c.Assert(err, qt.IsNil)
	c.Assert(expanded, qt.Equals, "/special:the-slug")
}
// TestPermalinkExpansionConcurrent exercises Expand from many goroutines to
// surface data races in the expander (meaningful when run with -race).
func TestPermalinkExpansionConcurrent(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	permalinksConfig := map[string]map[string]string{
		"page": {
			"posts": "/:slug/",
		},
	}

	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
	c.Assert(err, qt.IsNil)

	var wg sync.WaitGroup
	for i := 1; i < 20; i++ {
		wg.Add(1)
		// Loop variables are per-iteration since Go 1.22 (this file already
		// uses Go 1.24's b.Loop), so i can be captured directly; the old
		// shadowing parameter is no longer needed.
		go func() {
			defer wg.Done()
			page := newTestPage()
			page.kind = "page"
			for j := 1; j < 20; j++ {
				page.slug = fmt.Sprintf("slug%d", i+j)
				expanded, err := expander.Expand("posts", page)
				c.Assert(err, qt.IsNil)
				c.Assert(expanded, qt.Equals, fmt.Sprintf("/%s/", page.slug))
			}
		}()
	}
	wg.Wait()
}
// TestPermalinkExpansionSliceSyntax verifies the [i:j]/[last] slice syntax
// used in permalink tokens such as :sections[0] and :sections[last].
func TestPermalinkExpansionSliceSyntax(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	exp, err := NewPermalinkExpander(urlize, nil)
	c.Assert(err, qt.IsNil)

	slice4 := []string{"a", "b", "c", "d"}
	fn4 := func(s string) []string {
		return exp.toSliceFunc(s)(slice4)
	}

	slice1 := []string{"a"}
	fn1 := func(s string) []string {
		return exp.toSliceFunc(s)(slice1)
	}

	c.Run("Basic", func(c *qt.C) {
		c.Assert(fn4("[1:3]"), qt.DeepEquals, []string{"b", "c"})
		c.Assert(fn4("[1:]"), qt.DeepEquals, []string{"b", "c", "d"})
		c.Assert(fn4("[:2]"), qt.DeepEquals, []string{"a", "b"})
		c.Assert(fn4("[0:2]"), qt.DeepEquals, []string{"a", "b"})
		c.Assert(fn4("[:]"), qt.DeepEquals, []string{"a", "b", "c", "d"})
		c.Assert(fn4(""), qt.DeepEquals, []string{"a", "b", "c", "d"})
		c.Assert(fn4("[last]"), qt.DeepEquals, []string{"d"})
		c.Assert(fn4("[:last]"), qt.DeepEquals, []string{"a", "b", "c"})
		c.Assert(fn1("[last]"), qt.DeepEquals, []string{"a"})
		c.Assert(fn1("[:last]"), qt.DeepEquals, []string{})
		c.Assert(fn1("[1:last]"), qt.DeepEquals, []string{})
		c.Assert(fn1("[1]"), qt.DeepEquals, []string{})
	})

	c.Run("Out of bounds", func(c *qt.C) {
		c.Assert(fn4("[1:5]"), qt.DeepEquals, []string{"b", "c", "d"})
		c.Assert(fn4("[-1:5]"), qt.DeepEquals, []string{"a", "b", "c", "d"})
		// The exact duplicate of this assertion was removed.
		c.Assert(fn4("[5:]"), qt.DeepEquals, []string{})
		c.Assert(fn4("[5:32]"), qt.DeepEquals, []string{})
		c.Assert(exp.toSliceFunc("[:1]")(nil), qt.DeepEquals, []string(nil))
		c.Assert(exp.toSliceFunc("[:1]")([]string{}), qt.DeepEquals, []string(nil))

		// Invalid syntax all returns nil.
		c.Assert(fn4("[]"), qt.IsNil)
		c.Assert(fn4("[1:}"), qt.IsNil)
		c.Assert(fn4("foo"), qt.IsNil)
	})
}
// BenchmarkPermalinkExpand measures the cost of expanding a simple
// date+title permalink pattern for a single page.
func BenchmarkPermalinkExpand(b *testing.B) {
	page := newTestPage()
	page.title = "Hugo Rocks"
	d, _ := time.Parse("2006-01-02", "2019-02-28")
	page.date = d
	page.kind = "page"

	permalinksConfig := map[string]map[string]string{
		"page": {
			"posts": "/:year-:month-:title",
		},
	}

	expander, err := NewPermalinkExpander(urlize, permalinksConfig)
	if err != nil {
		b.Fatal(err)
	}

	// b.Loop (Go 1.24+) keeps setup out of the timed region.
	for b.Loop() {
		s, err := expander.Expand("posts", page)
		if err != nil {
			b.Fatal(err)
		}
		// Sanity check the result so the work cannot be optimized away.
		if s != "/2019-02-hugo-rocks" {
			b.Fatal(s)
		}
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/zero_file.autogen.go | resources/page/zero_file.autogen.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file is autogenerated.
package page
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/weighted.go | resources/page/weighted.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
"sort"
"github.com/gohugoio/hugo/common/collections"
)
// Compile-time check that WeightedPage satisfies collections.Slicer.
var _ collections.Slicer = WeightedPage{}

// WeightedPages is a list of Pages with their corresponding (and relative) weight
// [{Weight: 30, Page: *1}, {Weight: 40, Page: *2}]
type WeightedPages []WeightedPage
// Page will return the Page (of Kind taxonomyList) that represents this set
// of pages. This method will panic if p is empty, as that should never happen.
func (p WeightedPages) Page() Page {
	if len(p) == 0 {
		panic("WeightedPages is empty")
	}

	// TODO(bep) fix tests
	if owner := p[0].owner; owner != nil {
		return owner
	}
	return nil
}
// A WeightedPage is a Page with a weight.
type WeightedPage struct {
	// Weight is the relative weight within the owning set.
	Weight int
	Page

	// Reference to the owning Page. This avoids having to do
	// manual .Site.GetPage lookups. It is implemented in this roundabout way
	// because we cannot add additional state to the WeightedPages slice
	// without breaking lots of templates in the wild.
	owner Page
}

// NewWeightedPage creates a WeightedPage from a weight, a page, and the
// owning (taxonomy term) page.
func NewWeightedPage(weight int, p Page, owner Page) WeightedPage {
	return WeightedPage{Weight: weight, Page: p, owner: owner}
}

// String implements fmt.Stringer for debugging/logging.
func (w WeightedPage) String() string {
	return fmt.Sprintf("WeightedPage(%d,%q)", w.Weight, w.Page.Title())
}
// Slice is for internal use.
// for the template functions. See collections.Slice.
func (p WeightedPage) Slice(in any) (any, error) {
	switch items := in.(type) {
	case WeightedPages:
		// Already the right type; hand it back unchanged.
		return items, nil
	case []any:
		// Convert element by element, rejecting anything that is not a
		// WeightedPage.
		result := make(WeightedPages, len(items))
		for i, item := range items {
			wp, ok := item.(WeightedPage)
			if !ok {
				return nil, fmt.Errorf("type %T is not a WeightedPage", item)
			}
			result[i] = wp
		}
		return result, nil
	default:
		return nil, fmt.Errorf("invalid slice type %T", items)
	}
}
// Pages returns the Pages in this weighted page set.
func (wp WeightedPages) Pages() Pages {
	result := make(Pages, 0, len(wp))
	for _, w := range wp {
		result = append(result, w.Page)
	}
	return result
}
// Next returns the next Page relative to the given Page in
// this weighted page set.
// Note that "next" here is the element at the *preceding* index
// (wp[x-1]); presumably this matches the set's sort order — confirm
// against how templates use .Next on weighted pages.
func (wp WeightedPages) Next(cur Page) Page {
	for x, c := range wp {
		if c.Page.Eq(cur) {
			if x == 0 {
				// cur is first; there is no next page.
				return nil
			}
			return wp[x-1].Page
		}
	}
	// cur is not in this set.
	return nil
}
// Prev returns the previous Page relative to the given Page in
// this weighted page set.
// Note that "previous" here is the element at the *following* index
// (wp[x+1]); the mirror of Next above.
func (wp WeightedPages) Prev(cur Page) Page {
	for x, c := range wp {
		if c.Page.Eq(cur) {
			if x < len(wp)-1 {
				return wp[x+1].Page
			}
			// cur is last; there is no previous page.
			return nil
		}
	}
	// cur is not in this set.
	return nil
}
// Len and Swap implement sort.Interface together with Less below.
func (wp WeightedPages) Len() int      { return len(wp) }
func (wp WeightedPages) Swap(i, j int) { wp[i], wp[j] = wp[j], wp[i] }

// Sort stable sorts this weighted page set.
func (wp WeightedPages) Sort() { sort.Stable(wp) }

// Count returns the number of pages in this weighted page set.
func (wp WeightedPages) Count() int { return len(wp) }

// Less orders by ascending weight, falling back to the default page sort
// for equal weights.
func (wp WeightedPages) Less(i, j int) bool {
	if wp[i].Weight == wp[j].Weight {
		return DefaultPageSort(wp[i].Page, wp[j].Page)
	}
	return wp[i].Weight < wp[j].Weight
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_matcher.go | resources/page/page_matcher.go | // Copyright 2020 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
"iter"
"path/filepath"
"slices"
"strings"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs/hglob"
"github.com/gohugoio/hugo/hugolib/sitesmatrix"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/mitchellh/mapstructure"
)
// A PageMatcher can be used to match a Page with Glob patterns.
// Note that the pattern matching is case insensitive.
type PageMatcher struct {
	// A Glob pattern matching the content path below /content.
	// Expects Unix-styled slashes.
	// Note that this is the virtual path, so it starts at the mount root
	// with a leading "/".
	Path string

	// A Glob pattern matching the Page's Kind(s), e.g. "{home,section}"
	Kind string

	// A Glob pattern matching the Page's language, e.g. "{en,sv}".
	// Deprecated: use Sites.Matrix instead.
	Lang string

	// The sites to apply this to.
	// Note that we currently only use the Matrix field for cascade matching.
	Sites sitesmatrix.Sites

	// A Glob pattern matching the Page's Environment, e.g. "{production,development}".
	Environment string

	// Compiled values.

	// The site vectors to apply this to.
	// Excluded from mapstructure decoding; populated by compileSitesMatrix.
	SitesMatrixCompiled sitesmatrix.VectorProvider `mapstructure:"-"`
}
// Matches reports whether the given Page matches this matcher,
// using the page's kind, path and site environment (no sites matrix).
func (m PageMatcher) Matches(p Page) bool {
	return m.Match(p.Kind(), p.Path(), p.Site().Hugo().Environment, nil)
}
// Match reports whether the given kind, path, environment and sites matrix
// match this matcher. Empty matcher fields match everything. Note that a
// glob pattern that fails to compile is ignored (err != nil short-circuits
// the check), so an invalid pattern never filters anything out — this
// preserves the existing behavior.
func (m PageMatcher) Match(kind, path, environment string, sitesMatrix sitesmatrix.VectorProvider) bool {
	// Collapsed the previous nested if into a single condition.
	if sitesMatrix != nil && m.SitesMatrixCompiled != nil && !m.SitesMatrixCompiled.HasAnyVector(sitesMatrix) {
		return false
	}

	if m.Kind != "" {
		g, err := hglob.GetGlob(m.Kind)
		if err == nil && !g.Match(kind) {
			return false
		}
	}

	if m.Path != "" {
		g, err := hglob.GetGlob(m.Path)
		// TODO(bep) Path() vs filepath vs leading slash.
		p := strings.ToLower(filepath.ToSlash(path))
		// Redundant parentheses around the HasPrefix call removed (gofmt/staticcheck style).
		if !strings.HasPrefix(p, "/") {
			p = "/" + p
		}
		if err == nil && !g.Match(p) {
			return false
		}
	}

	if m.Environment != "" {
		g, err := hglob.GetGlob(m.Environment)
		if err == nil && !g.Match(environment) {
			return false
		}
	}

	return true
}
// disallowedCascadeKeys lists front matter keys that may not be set via a
// cascade.
var disallowedCascadeKeys = map[string]bool{
	// These define the structure of the page tree and cannot
	// currently be set in the cascade.
	"kind":    true,
	"path":    true,
	"lang":    true,
	"cascade": true,
}
// isGlobWithExtension reports whether the last path segment of s contains a
// dot, i.e. whether the glob looks like it targets files with an extension
// (e.g. "posts/*.md"). See issue 11977.
func isGlobWithExtension(s string) bool {
	// Avoid allocating a slice via strings.Split: only the segment after
	// the last "/" matters, and a single Contains check replaces Count > 0.
	last := s
	if i := strings.LastIndex(s, "/"); i != -1 {
		last = s[i+1:]
	}
	return strings.Contains(last, ".")
}
// checkCascadePattern emits deprecation warnings for legacy cascade target
// fields. The logger parameter is currently unused here but kept for
// signature compatibility with callers.
func checkCascadePattern(logger loggers.Logger, m PageMatcher) {
	if m.Lang != "" {
		hugo.Deprecate("cascade.target.language", "cascade.target.sites.matrix instead, see https://gohugo.io/content-management/front-matter/#target", "v0.150.0")
	}
}
// AddLangToCascadeTargetMap sets target.sites.matrix.languages to lang in m,
// unless that nested key is already set.
func AddLangToCascadeTargetMap(lang string, m maps.Params) {
	maps.SetNestedParamIfNotSet("target.sites.matrix.languages", ".", lang, m)
}
// DecodeCascadeConfig decodes an untyped cascade configuration (typically
// from site config or front matter) into PageMatcherParamsConfigs.
// It returns (nil, nil) when in decodes to no effective cascades.
func DecodeCascadeConfig(in any) (*PageMatcherParamsConfigs, error) {
	buildConfig := func(in any) (CascadeConfig, any, error) {
		dec := cascadeConfigDecoder{}
		if in == nil {
			return CascadeConfig{}, []map[string]any{}, nil
		}
		ms, err := maps.ToSliceStringMap(in)
		if err != nil {
			return CascadeConfig{}, nil, err
		}

		var cfgs []PageMatcherParamsConfig

		for _, m := range ms {
			m = maps.CleanConfigStringMap(m)
			var (
				c   PageMatcherParamsConfig
				err error
			)
			c, err = dec.mapToPageMatcherParamsConfig(m)
			if err != nil {
				return CascadeConfig{}, nil, err
			}
			// Reject structural keys (kind, path, lang, cascade) at the top level.
			for k := range m {
				if disallowedCascadeKeys[k] {
					return CascadeConfig{}, nil, fmt.Errorf("key %q not allowed in cascade config", k)
				}
			}
			cfgs = append(cfgs, c)
		}

		if len(cfgs) == 0 {
			return CascadeConfig{}, nil, nil
		}

		// Filter out cascades with no params and no fields (in place).
		var n int
		for _, cfg := range cfgs {
			if len(cfg.Params) > 0 || len(cfg.Fields) > 0 {
				cfgs[n] = cfg
				n++
			}
		}
		if n == 0 {
			return CascadeConfig{}, nil, nil
		}
		cfgs = cfgs[:n]

		return CascadeConfig{Cascades: cfgs}, cfgs, nil
	}

	c, err := config.DecodeNamespace[[]PageMatcherParamsConfig](in, buildConfig)
	if err != nil || len(c.Config.Cascades) == 0 {
		return nil, err
	}

	return &PageMatcherParamsConfigs{c: []*config.ConfigNamespace[[]PageMatcherParamsConfig, CascadeConfig]{c}}, nil
}
// cascadeConfigDecoder groups the cascade decoding helpers.
type cascadeConfigDecoder struct{}

// mapToPageMatcherParamsConfig converts one raw cascade entry into a
// PageMatcherParamsConfig: "target"/"_target" becomes the PageMatcher,
// "params" is merged into Params, and everything else goes into Fields.
func (d cascadeConfigDecoder) mapToPageMatcherParamsConfig(m map[string]any) (PageMatcherParamsConfig, error) {
	// The zero value's maps are always nil, so initialize them directly;
	// the previous nil checks on a freshly declared value were always true.
	pcfg := PageMatcherParamsConfig{
		Fields: make(maps.Params),
		Params: make(maps.Params),
	}

	for k, v := range m {
		switch strings.ToLower(k) {
		case "_target", "target":
			var target PageMatcher
			if err := d.decodePageMatcher(v, &target); err != nil {
				return pcfg, err
			}
			pcfg.Target = target
		case "params":
			// First-set wins; a value already present is not overwritten.
			params := maps.ToStringMap(v)
			for k, v := range params {
				if _, found := pcfg.Params[k]; !found {
					pcfg.Params[k] = v
				}
			}
		default:
			pcfg.Fields[k] = v
		}
	}

	return pcfg, pcfg.init()
}
// decodePageMatcher decodes m into v.
// It normalizes Kind and Path to lowercase, validates Kind against the
// known page kinds, and migrates the deprecated Lang field into
// Sites.Matrix.Languages.
func (d cascadeConfigDecoder) decodePageMatcher(m any, v *PageMatcher) error {
	if err := mapstructure.WeakDecode(m, v); err != nil {
		return err
	}

	v.Kind = strings.ToLower(v.Kind)
	if v.Kind != "" {
		// The Kind glob must match at least one valid page kind.
		g, _ := hglob.GetGlob(v.Kind)
		found := slices.ContainsFunc(kinds.AllKindsInPages, g.Match)
		if !found {
			return fmt.Errorf("%q did not match a valid Page Kind", v.Kind)
		}
	}

	v.Path = filepath.ToSlash(strings.ToLower(v.Path))

	if v.Lang != "" {
		// Deprecated Lang is folded into the sites matrix, deduplicated.
		v.Sites.Matrix.Languages = append(v.Sites.Matrix.Languages, v.Lang)
		v.Sites.Matrix.Languages = hstrings.UniqueStringsReuse(v.Sites.Matrix.Languages)
	}

	return nil
}
// compileSitesMatrix compiles v.Sites.Matrix into SitesMatrixCompiled,
// optionally inheriting dimensions from defaults when no matrix is
// configured. (The previous dangling "DecodeCascadeConfigOptions" comment
// here did not describe this function and has been replaced.)
func (v *PageMatcher) compileSitesMatrix(defaults sitesmatrix.VectorStore, configuredDimensions *sitesmatrix.ConfiguredDimensions) error {
	if v.Sites.Matrix.IsZero() && defaults == nil {
		// Nothing to do.
		v.SitesMatrixCompiled = nil
		return nil
	}

	intSetsCfg := sitesmatrix.IntSetsConfig{
		Globs: v.Sites.Matrix,
	}

	b := sitesmatrix.NewIntSetsBuilder(configuredDimensions).WithConfig(intSetsCfg)

	if defaults != nil && v.Sites.Matrix.IsZero() {
		// No explicit matrix configured; inherit dimensions from defaults.
		b = b.WithDimensionsFromOtherIfNotSet(defaults)
	}
	b = b.WithAllIfNotSet()

	v.SitesMatrixCompiled = b.Build()

	return nil
}
// CascadeConfig holds a list of decoded cascade entries.
type CascadeConfig struct {
	Cascades []PageMatcherParamsConfig
}

type PageMatcherParamsConfig struct {
	// Apply Params to all Pages matching Target.
	Params maps.Params
	// Fields holds all fields but Params.
	Fields maps.Params
	// Target is the PageMatcher that this config applies to.
	Target PageMatcher
}

// init normalizes the Params and Fields maps (lowercases keys etc.).
// It currently always returns nil.
func (p *PageMatcherParamsConfig) init() error {
	maps.PrepareParams(p.Params)
	maps.PrepareParams(p.Fields)
	return nil
}
// hasSitesMatrix reports whether this config carries a non-empty
// sites.matrix map in its Fields.
func (p *PageMatcherParamsConfig) hasSitesMatrix() bool {
	sites, ok := p.Fields["sites"]
	if !ok {
		return false
	}
	matrix, ok := maps.ToStringMap(sites)["matrix"]
	if !ok {
		return false
	}
	return len(maps.ToStringMap(matrix)) > 0
}
// PageMatcherParamsConfigs wraps one or more decoded cascade config
// namespaces; methods on it tolerate a nil receiver.
type PageMatcherParamsConfigs struct {
	c []*config.ConfigNamespace[[]PageMatcherParamsConfig, CascadeConfig]
}
// Append returns a new set with other's cascades after c's.
// Either side being nil or empty returns the other side unchanged.
func (c *PageMatcherParamsConfigs) Append(other *PageMatcherParamsConfigs) *PageMatcherParamsConfigs {
	switch {
	case c == nil || len(c.c) == 0:
		return other
	case other == nil || len(other.c) == 0:
		return c
	default:
		return &PageMatcherParamsConfigs{c: slices.Concat(c.c, other.c)}
	}
}
// Prepend returns a new set with other's cascades before c's.
// Either side being nil or empty returns the other side unchanged.
func (c *PageMatcherParamsConfigs) Prepend(other *PageMatcherParamsConfigs) *PageMatcherParamsConfigs {
	switch {
	case c == nil || len(c.c) == 0:
		return other
	case other == nil || len(other.c) == 0:
		return c
	default:
		return &PageMatcherParamsConfigs{c: slices.Concat(other.c, c.c)}
	}
}
// All returns an iterator over every cascade config in order.
// A nil receiver yields an empty sequence.
func (c *PageMatcherParamsConfigs) All() iter.Seq[PageMatcherParamsConfig] {
	if c == nil {
		return func(func(PageMatcherParamsConfig) bool) {}
	}
	return func(yield func(PageMatcherParamsConfig) bool) {
		// The previous second c == nil check here was dead code: c was
		// already verified non-nil before this closure was created.
		for _, v := range c.c {
			for _, vv := range v.Config.Cascades {
				if !yield(vv) {
					return
				}
			}
		}
	}
}
// Len returns the total number of cascade configs across all namespaces.
// A nil receiver has length zero.
func (c *PageMatcherParamsConfigs) Len() int {
	if c == nil {
		return 0
	}
	total := 0
	for _, ns := range c.c {
		total += len(ns.Config.Cascades)
	}
	return total
}
// SourceHash returns a hash of the source of all wrapped config namespaces,
// suitable for change detection. A nil receiver hashes to zero.
func (c *PageMatcherParamsConfigs) SourceHash() uint64 {
	if c == nil {
		return 0
	}
	h := hashing.XxHasher()
	defer h.Close()
	for _, v := range c.c {
		h.WriteString(v.SourceHash)
	}
	return h.Sum64()
}
// InitConfig compiles the sites matrix for every cascade target and emits
// deprecation warnings. Cascades that configure their own sites.matrix do
// not inherit the defaults. Safe to call on a nil receiver.
func (c *PageMatcherParamsConfigs) InitConfig(logger loggers.Logger, defaultsIn sitesmatrix.VectorStore, configuredDimensions *sitesmatrix.ConfiguredDimensions) error {
	if c == nil {
		return nil
	}
	for _, cc := range c.c {
		for i := range cc.Config.Cascades {
			// Copy out, mutate, write back: Cascades holds values, not pointers.
			ccc := cc.Config.Cascades[i]
			checkCascadePattern(logger, ccc.Target)
			defaults := defaultsIn
			hasSitesMatrix := ccc.hasSitesMatrix()
			if hasSitesMatrix {
				// An explicit sites.matrix overrides the defaults entirely.
				defaults = nil
			}
			if err := ccc.Target.compileSitesMatrix(defaults, configuredDimensions); err != nil {
				return fmt.Errorf("failed to compile cascade target %d: %w", i, err)
			}
			cc.Config.Cascades[i] = ccc
		}
	}
	return nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_lazy_contentprovider.go | resources/page/page_lazy_contentprovider.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"html/template"
"github.com/gohugoio/hugo/common/hsync"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/tableofcontents"
)
// OutputFormatContentProvider represents the method set that is "outputFormat aware" and that we
// provide lazy initialization for in case they get invoked outside of their normal rendering context, e.g. via .Translations.
// Note that this set is currently not complete, but should cover the most common use cases.
// For the others, the implementation will be from the page.NoopPage.
type OutputFormatContentProvider interface {
	OutputFormatPageContentProvider

	// for internal use.
	ContentRenderer
}

// OutputFormatPageContentProvider holds the exported methods from Page that are "outputFormat aware".
type OutputFormatPageContentProvider interface {
	MarkupProvider
	ContentProvider
	TableOfContentsProvider
	PageRenderProvider
}
// LazyContentProvider initializes itself when read. Each method of the
// ContentProvider interface initializes a content provider and shares it
// with other methods.
//
// Used in cases where we cannot guarantee whether the content provider
// will be needed. Must create via NewLazyContentProvider.
type LazyContentProvider struct {
	// init lazily produces (and caches, until Reset) the real provider.
	init hsync.ValueResetter[OutputFormatContentProvider]
}

// NewLazyContentProvider returns a LazyContentProvider initialized with
// function f. The resulting LazyContentProvider calls f in order to
// retrieve a ContentProvider
func NewLazyContentProvider(f func(ctx context.Context) OutputFormatContentProvider) *LazyContentProvider {
	lcp := LazyContentProvider{
		init: hsync.OnceMoreValue(f),
	}
	return &lcp
}
// Reset discards the cached provider; the next method call re-initializes it.
func (lcp *LazyContentProvider) Reset() {
	lcp.init.Reset()
}

// The methods below all delegate to the lazily initialized provider,
// triggering initialization on first use.

// Markup has no ctx parameter, so it initializes with context.Background().
func (lcp *LazyContentProvider) Markup(opts ...any) Markup {
	return lcp.init.Value(context.Background()).Markup(opts...)
}

func (lcp *LazyContentProvider) TableOfContents(ctx context.Context) template.HTML {
	return lcp.init.Value(ctx).TableOfContents(ctx)
}

func (lcp *LazyContentProvider) Fragments(ctx context.Context) *tableofcontents.Fragments {
	return lcp.init.Value(ctx).Fragments(ctx)
}

func (lcp *LazyContentProvider) Content(ctx context.Context) (any, error) {
	return lcp.init.Value(ctx).Content(ctx)
}

func (lcp *LazyContentProvider) ContentWithoutSummary(ctx context.Context) (template.HTML, error) {
	return lcp.init.Value(ctx).ContentWithoutSummary(ctx)
}

func (lcp *LazyContentProvider) Plain(ctx context.Context) string {
	return lcp.init.Value(ctx).Plain(ctx)
}

func (lcp *LazyContentProvider) PlainWords(ctx context.Context) []string {
	return lcp.init.Value(ctx).PlainWords(ctx)
}

func (lcp *LazyContentProvider) Summary(ctx context.Context) template.HTML {
	return lcp.init.Value(ctx).Summary(ctx)
}

func (lcp *LazyContentProvider) Truncated(ctx context.Context) bool {
	return lcp.init.Value(ctx).Truncated(ctx)
}

func (lcp *LazyContentProvider) FuzzyWordCount(ctx context.Context) int {
	return lcp.init.Value(ctx).FuzzyWordCount(ctx)
}

func (lcp *LazyContentProvider) WordCount(ctx context.Context) int {
	return lcp.init.Value(ctx).WordCount(ctx)
}

func (lcp *LazyContentProvider) ReadingTime(ctx context.Context) int {
	return lcp.init.Value(ctx).ReadingTime(ctx)
}

func (lcp *LazyContentProvider) Len(ctx context.Context) int {
	return lcp.init.Value(ctx).Len(ctx)
}

func (lcp *LazyContentProvider) Render(ctx context.Context, layout ...string) (template.HTML, error) {
	return lcp.init.Value(ctx).Render(ctx, layout...)
}

func (lcp *LazyContentProvider) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
	return lcp.init.Value(ctx).RenderString(ctx, args...)
}

func (lcp *LazyContentProvider) ParseAndRenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.ResultRender, error) {
	return lcp.init.Value(ctx).ParseAndRenderContent(ctx, content, renderTOC)
}

func (lcp *LazyContentProvider) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) {
	return lcp.init.Value(ctx).ParseContent(ctx, content)
}

func (lcp *LazyContentProvider) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) {
	return lcp.init.Value(ctx).RenderContent(ctx, content, doc)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_related.go | resources/page/pages_related.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"fmt"
"sync"
"github.com/gohugoio/hugo/common/para"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/related"
"github.com/mitchellh/mapstructure"
"github.com/spf13/cast"
)
var (
	// Assert that Pages and PageGroup implements the PageGenealogist interface.
	_ PageGenealogist = (Pages)(nil)
	_ PageGenealogist = PageGroup{}
)

// A PageGenealogist finds related pages in a page collection. This interface is implemented
// by Pages and PageGroup, which makes it available as `{{ .RegularRelated . }}` etc.
type PageGenealogist interface {
	// Template example:
	// {{ $related := .RegularPages.Related . }}
	Related(ctx context.Context, opts any) (Pages, error)

	// Template example:
	// {{ $related := .RegularPages.RelatedIndices . "tags" "date" }}
	// Deprecated: Use Related instead.
	RelatedIndices(ctx context.Context, doc related.Document, indices ...any) (Pages, error)

	// Template example:
	// {{ $related := .RegularPages.RelatedTo ( keyVals "tags" "hugo", "rocks") ( keyVals "date" .Date ) }}
	// Deprecated: Use Related instead.
	RelatedTo(ctx context.Context, args ...types.KeyValues) (Pages, error)
}
// Related searches all the configured indices with the search keywords from the
// supplied document.
//
// The argument is either a related.Document or an options map that is
// decoded into related.SearchOpts; any other type is an error.
func (p Pages) Related(ctx context.Context, optsv any) (Pages, error) {
	if len(p) == 0 {
		return nil, nil
	}

	var opts related.SearchOpts
	switch v := optsv.(type) {
	case related.Document:
		opts.Document = v
	case map[string]any:
		if err := mapstructure.WeakDecode(v, &opts); err != nil {
			return nil, err
		}
	default:
		return nil, fmt.Errorf("invalid argument type %T", optsv)
	}

	// search already returns (nil, err) on failure; the previous
	// result/err unpacking added nothing.
	return p.search(ctx, opts)
}
// RelatedIndices searches the given indices with the search keywords from the
// supplied document.
// Deprecated: Use Related instead.
func (p Pages) RelatedIndices(ctx context.Context, doc related.Document, indices ...any) (Pages, error) {
	indicesStr, err := cast.ToStringSliceE(indices)
	if err != nil {
		return nil, err
	}

	opts := related.SearchOpts{
		Document: doc,
		Indices:  indicesStr,
	}

	// search already returns (nil, err) on failure; the previous
	// result/err unpacking added nothing.
	return p.search(ctx, opts)
}
// RelatedTo searches the given indices with the corresponding values.
// Deprecated: Use Related instead.
func (p Pages) RelatedTo(ctx context.Context, args ...types.KeyValues) (Pages, error) {
	if len(p) == 0 {
		return nil, nil
	}

	return p.search(ctx, related.SearchOpts{NamedSlices: args})
}
// search resolves the inverted index for p and runs the given search options
// against it, returning the matching pages.
func (p Pages) search(ctx context.Context, opts related.SearchOpts) (Pages, error) {
	return p.withInvertedIndex(ctx, func(idx *related.InvertedIndex) ([]related.Document, error) {
		return idx.Search(ctx, opts)
	})
}
// withInvertedIndex fetches (or builds) the cached inverted index for p,
// invokes the provided search callback on it, and converts the matched
// documents back into Pages. Returns (nil, nil) when p is empty or the
// search produced no matches.
func (p Pages) withInvertedIndex(ctx context.Context, search func(idx *related.InvertedIndex) ([]related.Document, error)) (Pages, error) {
	if len(p) == 0 {
		return nil, nil
	}

	provider, ok := p[0].(RelatedDocsHandlerProvider)
	if !ok {
		return nil, fmt.Errorf("invalid type %T in related search", p[0])
	}

	handler := provider.GetInternalRelatedDocsHandler()

	idx, err := handler.getOrCreateIndex(ctx, p)
	if err != nil {
		return nil, err
	}

	docs, err := search(idx)
	if err != nil {
		return nil, err
	}
	if len(docs) == 0 {
		return nil, nil
	}

	pages := make(Pages, len(docs))
	for i, doc := range docs {
		pages[i] = doc.(Page)
	}
	return pages, nil
}
// cachedPostingList pairs a page collection with the inverted index built
// from it, so the index can be reused for repeated searches over the same
// collection.
type cachedPostingList struct {
	// The page collection the index was built from.
	p Pages

	// The inverted index built from p.
	postingList *related.InvertedIndex
}

// RelatedDocsHandler builds and caches inverted indexes for page collections.
type RelatedDocsHandler struct {
	// The related-content configuration used to build indexes.
	cfg related.Config

	// Cached indexes, one entry per distinct page collection.
	postingLists []*cachedPostingList
	// Guards postingLists.
	mu sync.RWMutex

	// Worker pool used to build page fragments concurrently.
	workers *para.Workers
}
// NewRelatedDocsHandler creates a handler for the given related-content
// configuration, backed by a worker pool sized from the configured worker
// multiplier.
func NewRelatedDocsHandler(cfg related.Config) *RelatedDocsHandler {
	return &RelatedDocsHandler{cfg: cfg, workers: para.New(config.GetNumWorkerMultiplier())}
}
// Clone returns a fresh handler with the same configuration. Note that the
// cached posting lists are not carried over: the clone starts with an empty
// cache and its own worker pool.
func (s *RelatedDocsHandler) Clone() *RelatedDocsHandler {
	return NewRelatedDocsHandler(s.cfg)
}
// getIndex returns the cached inverted index for the given page collection,
// or nil if none has been built yet.
// This assumes that a lock has been acquired.
func (s *RelatedDocsHandler) getIndex(p Pages) *related.InvertedIndex {
	for _, cached := range s.postingLists {
		if pagesEqual(p, cached.p) {
			return cached.postingList
		}
	}
	return nil
}
// getOrCreateIndex returns the inverted index for the given page collection,
// building and caching it on first use. Safe for concurrent use.
func (s *RelatedDocsHandler) getOrCreateIndex(ctx context.Context, p Pages) (*related.InvertedIndex, error) {
	s.mu.RLock()
	cachedIndex := s.getIndex(p)
	if cachedIndex != nil {
		s.mu.RUnlock()
		return cachedIndex, nil
	}
	s.mu.RUnlock()

	s.mu.Lock()
	defer s.mu.Unlock()

	// Double check: another goroutine may have built the index while we
	// were waiting for the write lock.
	if cachedIndex := s.getIndex(p); cachedIndex != nil {
		return cachedIndex, nil
	}

	for _, c := range s.cfg.Indices {
		if c.Type == related.TypeFragments {
			// This will trigger building the Pages' fragment map.
			g, _ := s.workers.Start(ctx)
			for _, page := range p {
				fp, ok := page.(related.FragmentProvider)
				if !ok {
					continue
				}
				g.Run(func() error {
					fp.Fragments(ctx)
					return nil
				})
			}
			if err := g.Wait(); err != nil {
				return nil, err
			}
			break
		}
	}

	searchIndex := related.NewInvertedIndex(s.cfg)

	for _, page := range p {
		if err := searchIndex.Add(ctx, page); err != nil {
			return nil, err
		}
	}

	// Finalize before caching: if Finalize fails we must not leave a
	// half-built index in postingLists, where subsequent calls would find
	// and return it without ever seeing the error.
	if err := searchIndex.Finalize(ctx); err != nil {
		return nil, err
	}

	s.postingLists = append(s.postingLists, &cachedPostingList{p: p, postingList: searchIndex})

	return searchIndex, nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagegroup.go | resources/page/pagegroup.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"errors"
"fmt"
"reflect"
"sort"
"strings"
"time"
"github.com/spf13/cast"
"github.com/gohugoio/hugo/common/collections"
"github.com/gohugoio/hugo/common/hreflect"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/resources/resource"
)
var (
_ collections.Slicer = PageGroup{}
_ compare.ProbablyEqer = PageGroup{}
_ compare.ProbablyEqer = PagesGroup{}
)
// PageGroup represents a group of pages, grouped by the key.
// The key is typically a year or similar.
type PageGroup struct {
// The key, typically a year or similar.
Key any
// The Pages in this group.
Pages
}
// mapKeyValues is a sortable slice of reflected map keys.
type mapKeyValues []reflect.Value

func (v mapKeyValues) Len() int      { return len(v) }
func (v mapKeyValues) Swap(i, j int) { v[i], v[j] = v[j], v[i] }

// mapKeyByInt sorts map keys by their integer value.
type mapKeyByInt struct{ mapKeyValues }

func (s mapKeyByInt) Less(i, j int) bool { return s.mapKeyValues[i].Int() < s.mapKeyValues[j].Int() }

// mapKeyByStr sorts map keys by their string value using the supplied less
// function (typically collation-aware).
type mapKeyByStr struct {
	less func(a, b string) bool
	mapKeyValues
}

func (s mapKeyByStr) Less(i, j int) bool {
	return s.less(s.mapKeyValues[i].String(), s.mapKeyValues[j].String())
}
// sortKeys sorts the reflected map keys v in the given order ("desc" for
// descending, anything else ascending) and returns the same slice. Only int
// and string keys are sorted; other kinds are returned unchanged. String
// comparison is collation-aware, based on examplePage's language.
func sortKeys(examplePage Page, v []reflect.Value, order string) []reflect.Value {
	if len(v) <= 1 {
		return v
	}

	desc := order == "desc"

	switch v[0].Kind() {
	case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
		var s sort.Interface = mapKeyByInt{v}
		if desc {
			s = sort.Reverse(s)
		}
		sort.Sort(s)
	case reflect.String:
		stringLess, close := collatorStringLess(examplePage)
		defer close()
		var s sort.Interface = mapKeyByStr{stringLess, v}
		if desc {
			s = sort.Reverse(s)
		}
		sort.Sort(s)
	}
	return v
}
// PagesGroup represents a list of page groups.
// This is what you get when doing page grouping in the templates.
type PagesGroup []PageGroup
// Reverse reverses the order of this list of page groups in place and
// returns the same slice.
func (p PagesGroup) Reverse() PagesGroup {
	for left, right := 0, len(p)-1; left < right; left, right = left+1, right-1 {
		p[left], p[right] = p[right], p[left]
	}
	return p
}
var (
errorType = reflect.TypeFor[error]()
pagePtrType = reflect.TypeFor[Page]()
pagesType = reflect.TypeFor[Pages]()
)
// GroupBy groups by the value in the given field or method name and with the given order.
// Valid values for order is asc, desc, rev and reverse.
func (p Pages) GroupBy(ctx context.Context, key string, order ...string) (PagesGroup, error) {
	if len(p) < 1 {
		return nil, nil
	}

	direction := "asc"

	if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
		direction = "desc"
	}

	// ft holds either a reflect.Method (when key names a Page method) or a
	// reflect.StructField (when it names a field).
	var ft any
	index := hreflect.GetMethodIndexByName(pagePtrType, key)
	if index != -1 {
		m := pagePtrType.Method(index)
		// A grouping method must return exactly one grouping value,
		// optionally followed by an error.
		if m.Type.NumOut() == 0 || m.Type.NumOut() > 2 {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		if m.Type.NumOut() == 1 && m.Type.Out(0).Implements(errorType) {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		if m.Type.NumOut() == 2 && !m.Type.Out(1).Implements(errorType) {
			return nil, errors.New(key + " is a Page method but you can't use it with GroupBy")
		}
		ft = m
	} else {
		var ok bool
		ft, ok = pagePtrType.Elem().FieldByName(key)
		if !ok {
			return nil, errors.New(key + " is neither a field nor a method of Page")
		}
	}

	// Build a map[keyType]Pages via reflection, keyed by the grouping value.
	var tmp reflect.Value
	switch e := ft.(type) {
	case reflect.StructField:
		tmp = reflect.MakeMap(reflect.MapOf(e.Type, pagesType))
	case reflect.Method:
		tmp = reflect.MakeMap(reflect.MapOf(e.Type.Out(0), pagesType))
	}

	for _, e := range p {
		ppv := reflect.ValueOf(e)
		var fv reflect.Value
		switch ft.(type) {
		case reflect.StructField:
			fv = ppv.Elem().FieldByName(key)
		case reflect.Method:
			fv = hreflect.CallMethodByName(ctx, key, ppv)[0]
		}
		// Pages with an invalid (e.g. missing) grouping value are skipped.
		if !fv.IsValid() {
			continue
		}
		if !tmp.MapIndex(fv).IsValid() {
			tmp.SetMapIndex(fv, reflect.MakeSlice(pagesType, 0, 0))
		}
		tmp.SetMapIndex(fv, reflect.Append(tmp.MapIndex(fv), ppv))
	}

	// Emit one PageGroup per distinct key, in sorted key order.
	sortedKeys := sortKeys(p[0], tmp.MapKeys(), direction)
	r := make([]PageGroup, len(sortedKeys))
	for i, k := range sortedKeys {
		r[i] = PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)}
	}

	return r, nil
}
// GroupByParam groups by the given page parameter key's value and with the given order.
// Valid values for order is asc, desc, rev and reverse.
func (p Pages) GroupByParam(key string, order ...string) (PagesGroup, error) {
	if len(p) < 1 {
		return nil, nil
	}

	direction := "asc"

	if len(order) > 0 && (strings.ToLower(order[0]) == "desc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse") {
		direction = "desc"
	}

	// Determine the key type from the first page that has the parameter set.
	// []string parameters are skipped here; pages whose value has a different
	// type than the one found are excluded below.
	var tmp reflect.Value
	var keyt reflect.Type
	for _, e := range p {
		param := resource.GetParamToLower(e, key)
		if param != nil {
			if _, ok := param.([]string); !ok {
				keyt = reflect.TypeOf(param)
				tmp = reflect.MakeMap(reflect.MapOf(keyt, pagesType))
				break
			}
		}
	}
	// No page had a usable value for the parameter.
	if !tmp.IsValid() {
		return nil, nil
	}

	for _, e := range p {
		param := resource.GetParam(e, key)

		if param == nil || reflect.TypeOf(param) != keyt {
			continue
		}

		v := reflect.ValueOf(param)

		if !tmp.MapIndex(v).IsValid() {
			tmp.SetMapIndex(v, reflect.MakeSlice(pagesType, 0, 0))
		}
		tmp.SetMapIndex(v, reflect.Append(tmp.MapIndex(v), reflect.ValueOf(e)))
	}

	var r []PageGroup
	for _, k := range sortKeys(p[0], tmp.MapKeys(), direction) {
		r = append(r, PageGroup{Key: k.Interface(), Pages: tmp.MapIndex(k).Interface().(Pages)})
	}

	return r, nil
}
// groupByDateField sorts p with the given sorter, then walks the sorted pages
// and starts a new group whenever the formatted date (per getDate) changes.
// The default order is descending; asc/rev/reverse keeps the sorter's order.
func (p Pages) groupByDateField(format string, sorter func(p Pages) Pages, getDate func(p Page) time.Time, order ...string) (PagesGroup, error) {
	if len(p) < 1 {
		return nil, nil
	}

	sp := sorter(p)

	if !(len(order) > 0 && (strings.ToLower(order[0]) == "asc" || strings.ToLower(order[0]) == "rev" || strings.ToLower(order[0]) == "reverse")) {
		sp = sp.Reverse()
	}

	if sp == nil {
		return nil, nil
	}

	firstPage := sp[0]
	date := getDate(firstPage)

	// Pages may be a mix of multiple languages, so we need to use the language
	// for the currently rendered Site.
	currentSite := firstPage.Site().Current()
	formatter := langs.GetTimeFormatter(currentSite.Language())
	formatted := formatter.Format(date, format)

	// Seed the result with a group for the first page, then extend or start
	// groups as the formatted date changes.
	var r []PageGroup
	r = append(r, PageGroup{Key: formatted, Pages: make(Pages, 0)})
	r[0].Pages = append(r[0].Pages, sp[0])

	i := 0
	for _, e := range sp[1:] {
		date = getDate(e)
		formatted := formatter.Format(date, format)
		if r[i].Key.(string) != formatted {
			r = append(r, PageGroup{Key: formatted})
			i++
		}
		r[i].Pages = append(r[i].Pages, e)
	}

	return r, nil
}
// GroupByDate groups by the given page's Date value in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByDate(format string, order ...string) (PagesGroup, error) {
	return p.groupByDateField(
		format,
		func(pages Pages) Pages { return pages.ByDate() },
		func(pg Page) time.Time { return pg.Date() },
		order...)
}
// GroupByPublishDate groups by the given page's PublishDate value in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByPublishDate(format string, order ...string) (PagesGroup, error) {
	return p.groupByDateField(
		format,
		func(pages Pages) Pages { return pages.ByPublishDate() },
		func(pg Page) time.Time { return pg.PublishDate() },
		order...)
}
// GroupByExpiryDate groups by the given page's ExpireDate value in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByExpiryDate(format string, order ...string) (PagesGroup, error) {
	return p.groupByDateField(
		format,
		func(pages Pages) Pages { return pages.ByExpiryDate() },
		func(pg Page) time.Time { return pg.ExpiryDate() },
		order...)
}
// GroupByLastmod groups by the given page's Lastmod value in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByLastmod(format string, order ...string) (PagesGroup, error) {
	return p.groupByDateField(
		format,
		func(pages Pages) Pages { return pages.ByLastmod() },
		func(pg Page) time.Time { return pg.Lastmod() },
		order...)
}
// GroupByParamDate groups by a date set as a param on the page in
// the given format and with the given order.
// Valid values for order is asc, desc, rev and reverse.
// For valid format strings, see https://golang.org/pkg/time/#Time.Format
func (p Pages) GroupByParamDate(key string, format string, order ...string) (PagesGroup, error) {
	// Cache the dates. The sorter closure below fills this map, and the
	// getDate closure reads from it, so the (expensive) param lookup and
	// conversion happens once per page.
	dates := make(map[Page]time.Time)

	sorter := func(pages Pages) Pages {
		var r Pages

		for _, p := range pages {
			param := resource.GetParam(p, key)
			var t time.Time

			if param != nil {
				var ok bool
				if t, ok = param.(time.Time); !ok {
					// Probably a string. Try to convert it to time.Time.
					t = cast.ToTime(param)
				}
			}

			// Pages with a missing or unconvertible param get the zero time.
			dates[p] = t
			r = append(r, p)
		}

		pdate := func(p1, p2 Page) bool {
			return dates[p1].Unix() < dates[p2].Unix()
		}
		pageBy(pdate).Sort(r)
		return r
	}

	getDate := func(p Page) time.Time {
		return dates[p]
	}

	return p.groupByDateField(format, sorter, getDate, order...)
}
// ProbablyEq wraps compare.ProbablyEqer
// For internal use.
func (p PageGroup) ProbablyEq(other any) bool {
	o, ok := other.(PageGroup)
	if !ok {
		return false
	}
	return p.Key == o.Key && p.Pages.ProbablyEq(o.Pages)
}
// Slice is for internal use.
// for the template functions. See collections.Slice.
func (p PageGroup) Slice(in any) (any, error) {
	switch items := in.(type) {
	case PageGroup:
		return items, nil
	case []any:
		groups := make(PagesGroup, 0, len(items))
		for _, item := range items {
			group, ok := item.(PageGroup)
			if !ok {
				return nil, fmt.Errorf("type %T is not a PageGroup", item)
			}
			groups = append(groups, group)
		}
		return groups, nil
	default:
		return nil, fmt.Errorf("invalid slice type %T", items)
	}
}
// Len returns the total number of pages across all the groups.
func (psg PagesGroup) Len() int {
	var total int
	for _, group := range psg {
		total += len(group.Pages)
	}
	return total
}
// ProbablyEq wraps compare.ProbablyEqer
func (psg PagesGroup) ProbablyEq(other any) bool {
	o, ok := other.(PagesGroup)
	if !ok || len(psg) != len(o) {
		return false
	}

	for i, pg := range psg {
		if !pg.ProbablyEq(o[i]) {
			return false
		}
	}

	return true
}
// ToPagesGroup tries to convert seq into a PagesGroup. The second return
// value reports whether the conversion applied; an error is returned only
// for a []any that mixes PageGroup with other element types.
func ToPagesGroup(seq any) (PagesGroup, bool, error) {
	switch v := seq.(type) {
	case nil:
		return nil, true, nil
	case PagesGroup:
		return v, true, nil
	case []PageGroup:
		return PagesGroup(v), true, nil
	case []any:
		if len(v) == 0 {
			break
		}
		if _, ok := v[0].(PageGroup); !ok {
			break
		}
		pagesGroup := make(PagesGroup, len(v))
		for i, ipg := range v {
			pg, ok := ipg.(PageGroup)
			if !ok {
				return nil, false, fmt.Errorf("unsupported type in paginate from slice, got %T instead of PageGroup", ipg)
			}
			pagesGroup[i] = pg
		}
		return pagesGroup, true, nil
	}

	return nil, false, nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_author.go | resources/page/page_author.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
// AuthorList is a list of all authors and their metadata.
// Deprecated: Use taxonomies instead.
type AuthorList map[string]Author
// Author contains details about the author of a page.
// Deprecated: Use taxonomies instead.
type Author struct {
GivenName string
FamilyName string
DisplayName string
Thumbnail string
Image string
ShortBio string
LongBio string
Email string
Social AuthorSocial
}
// AuthorSocial is a place to put social details per author. These are the
// standard keys that themes will expect to have available, but can be
// expanded to any others on a per site basis
// - website
// - github
// - facebook
// - twitter
// - pinterest
// - instagram
// - youtube
// - linkedin
// - skype
// Deprecated: Use taxonomies instead.
type AuthorSocial map[string]string
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_sort_search.go | resources/page/pages_sort_search.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import "sort"
// Used in page binary search, the most common in front. Each candidate
// ordering is tried in turn by isPagesProbablySorted.
var pageLessFunctions = []func(p1, p2 Page) bool{
	DefaultPageSort,
	lessPageDate,
	lessPagePubDate,
	lessPageTitle,
	lessPageLinkTitle,
}
// searchPage returns the index of p in pages, or -1 if not found. Large
// collections that look sorted are binary-searched first, with a linear scan
// as fallback in every case.
func searchPage(p Page, pages Pages) int {
	const linearThreshold = 1000

	if len(pages) < linearThreshold {
		// For smaller data sets, doing a linear search is faster.
		return searchPageLinear(p, pages, 0)
	}

	less := isPagesProbablySorted(pages, pageLessFunctions...)
	if less == nil {
		return searchPageLinear(p, pages, 0)
	}

	if i := searchPageBinary(p, pages, less); i != -1 {
		return i
	}
	return searchPageLinear(p, pages, 0)
}
// searchPageLinear scans pages from index start and returns the index of the
// first page equal to p, or -1.
func searchPageLinear(p Page, pages Pages, start int) int {
	for offset, candidate := range pages[start:] {
		if candidate.Eq(p) {
			return start + offset
		}
	}
	return -1
}
// searchPageBinary uses sort.Search with the given less function to find a
// candidate position for p, then confirms with a linear scan from there
// (pages that compare equal under less may occupy several adjacent slots).
// Returns -1 if p is not found.
func searchPageBinary(p Page, pages Pages, less func(p1, p2 Page) bool) int {
	n := len(pages)

	f := func(i int) bool {
		c := pages[i]
		isLess := less(c, p)
		return !isLess || c.Eq(p)
	}

	i := sort.Search(n, f)
	if i == n {
		return -1
	}

	return searchPageLinear(p, pages, i)
}
// isPagesProbablySorted tests if the pages slice is probably sorted.
// It samples up to ~15-16 adjacent pairs (striding by 50 on large slices) for
// each candidate less function, checking both forward and reverse order, and
// returns a less function matching the detected order, or nil if none fits.
func isPagesProbablySorted(pages Pages, lessFuncs ...func(p1, p2 Page) bool) func(p1, p2 Page) bool {
	n := len(pages)
	step := 1
	if n > 500 {
		step = 50
	}

	// is samples backwards from the end; any out-of-order pair rejects.
	is := func(less func(p1, p2 Page) bool) bool {
		samples := 0
		for i := n - 1; i > 0; i = i - step {
			if less(pages[i], pages[i-1]) {
				return false
			}
			samples++
			if samples >= 15 {
				return true
			}
		}
		return samples > 0
	}

	// isReverse samples forwards from the start.
	// NOTE(review): this uses `samples > 15` where is uses `samples >= 15`,
	// so reverse detection takes one extra sample — looks unintentional but
	// is harmless; confirm before changing.
	isReverse := func(less func(p1, p2 Page) bool) bool {
		samples := 0
		for i := 0; i < n-1; i = i + step {
			if less(pages[i], pages[i+1]) {
				return false
			}
			samples++
			if samples > 15 {
				return true
			}
		}
		return samples > 0
	}

	for _, less := range lessFuncs {
		if is(less) {
			return less
		}
		// A reverse-sorted slice is reported via a flipped less function.
		if isReverse(less) {
			return func(p1, p2 Page) bool {
				return less(p2, p1)
			}
		}
	}

	return nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_markup_integration_test.go | resources/page/page_markup_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"strconv"
"strings"
"testing"
"github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/markup/asciidocext"
"github.com/gohugoio/hugo/markup/rst"
)
func TestPageMarkupMethods(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
summaryLength=2
-- content/p1.md --
---
title: "Post 1"
date: "2020-01-01"
---
{{% foo %}}
-- layouts/_shortcodes/foo.html --
Two *words*.
{{/* Test that markup scope is set in all relevant constructs. */}}
{{ if eq hugo.Context.MarkupScope "foo" }}
## Heading 1
Sint ad mollit qui Lorem ut occaecat culpa officia. Et consectetur aute voluptate non sit ullamco adipisicing occaecat. Sunt deserunt amet sit ad. Deserunt enim voluptate proident ipsum dolore dolor ut sit velit esse est mollit irure esse. Mollit incididunt veniam laboris magna et excepteur sit duis. Magna adipisicing reprehenderit tempor irure.
### Heading 2
Exercitation quis est consectetur occaecat nostrud. Ullamco aute mollit aliqua est amet. Exercitation ullamco consectetur dolor labore et non irure eu cillum Lorem.
{{ end }}
-- layouts/home.html --
Home.
{{ .Content }}
-- layouts/single.html --
Single.
Page.ContentWithoutSummmary: {{ .ContentWithoutSummary }}|
{{ template "render-scope" (dict "page" . "scope" "main") }}
{{ template "render-scope" (dict "page" . "scope" "foo") }}
{{ define "render-scope" }}
{{ $c := .page.Markup .scope }}
{{ with $c.Render }}
{{ $.scope }}: Content: {{ .Content }}|
{{ $.scope }}: ContentWithoutSummary: {{ .ContentWithoutSummary }}|
{{ $.scope }}: Plain: {{ .Plain }}|
{{ $.scope }}: PlainWords: {{ .PlainWords }}|
{{ $.scope }}: WordCount: {{ .WordCount }}|
{{ $.scope }}: FuzzyWordCount: {{ .FuzzyWordCount }}|
{{ $.scope }}: ReadingTime: {{ .ReadingTime }}|
{{ $.scope }}: Len: {{ .Len }}|
{{ $.scope }}: Summary: {{ with .Summary }}{{ . }}{{ else }}nil{{ end }}|
{{ end }}
{{ $.scope }}: Fragments: {{ $c.Fragments.Identifiers }}|
{{ end }}
`
b := hugolib.Test(t, files)
// Main scope.
b.AssertFileContent("public/p1/index.html",
"Page.ContentWithoutSummmary: |",
"main: Content: <p>Two <em>words</em>.</p>\n|",
"main: ContentWithoutSummary: |",
"main: Plain: Two words.\n|",
"PlainWords: [Two words.]|\nmain: WordCount: 2|\nmain: FuzzyWordCount: 100|\nmain: ReadingTime: 1|",
"main: Summary: <p>Two <em>words</em>.</p>|\n\nmain: Fragments: []|",
"main: Len: 27|",
)
// Foo scope (has more content).
b.AssertFileContent("public/p1/index.html",
"foo: Content: <p>Two <em>words</em>.</p>\n<h2",
"foo: ContentWithoutSummary: <h2",
"Plain: Two words.\nHeading 1",
"PlainWords: [Two words. Heading 1",
"foo: WordCount: 81|\nfoo: FuzzyWordCount: 100|\nfoo: ReadingTime: 1|\nfoo: Len: 622|",
"foo: Summary: <p>Two <em>words</em>.</p>|",
"foo: Fragments: [heading-1 heading-2]|",
)
}
func TestPageMarkupScope(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ["taxonomy", "term", "rss", "section"]
-- content/p1.md --
---
title: "Post 1"
date: "2020-01-01"
---
# P1
{{< foo >}}
Begin:{{% includerendershortcodes "p2" %}}:End
Begin:{{< includecontent "p3" >}}:End
-- content/p2.md --
---
title: "Post 2"
date: "2020-01-02"
---
# P2
-- content/p3.md --
---
title: "Post 3"
date: "2020-01-03"
---
# P3
{{< foo >}}
-- layouts/home.html --
Home.
{{ with site.GetPage "p1" }}
{{ with .Markup "home" }}
{{ .Render.Content }}
{{ end }}
{{ end }}
-- layouts/single.html --
Single.
{{ with .Markup }}
{{ with .Render }}
{{ .Content }}
{{ end }}
{{ end }}
-- layouts/_markup/render-heading.html --
Render heading: title: {{ .Text}} scope: {{ hugo.Context.MarkupScope }}|
-- layouts/_shortcodes/foo.html --
Foo scope: {{ hugo.Context.MarkupScope }}|
-- layouts/_shortcodes/includerendershortcodes.html --
{{ $p := site.GetPage (.Get 0) }}
includerendershortcodes: {{ hugo.Context.MarkupScope }}|{{ $p.Markup.RenderShortcodes }}|
-- layouts/_shortcodes/includecontent.html --
{{ $p := site.GetPage (.Get 0) }}
includecontent: {{ hugo.Context.MarkupScope }}|{{ $p.Markup.Render.Content }}|
`
b := hugolib.Test(t, files)
b.AssertFileContentExact("public/p1/index.html", "Render heading: title: P1 scope: |", "Foo scope: |")
b.AssertFileContentExact("public/index.html",
"Begin:\nincludecontent: home|Render heading: title: P3 scope: home|Foo scope: home|\n|\n:End",
"Render heading: title: P1 scope: home|",
"Foo scope: home|",
"Begin:\nincluderendershortcodes: home|</p>\nRender heading: title: P2 scope: home|<p>|:End",
)
}
func TestPageContentWithoutSummary(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
summaryLength=5
-- content/p1.md --
---
title: "Post 1"
date: "2020-01-01"
---
This is summary.
<!--more-->
This is content.
-- content/p2.md --
---
title: "Post 2"
date: "2020-01-01"
---
This is some content about a summary and more.
Another paragraph.
Third paragraph.
-- content/p3.md --
---
title: "Post 3"
date: "2020-01-01"
summary: "This is summary in front matter."
---
This is content.
-- layouts/single.html --
Single.
Page.Summary: {{ .Summary }}|
{{ with .Markup.Render }}
Content: {{ .Content }}|
ContentWithoutSummary: {{ .ContentWithoutSummary }}|
WordCount: {{ .WordCount }}|
FuzzyWordCount: {{ .FuzzyWordCount }}|
{{ with .Summary }}
Summary: {{ . }}|
Summary Type: {{ .Type }}|
Summary Truncated: {{ .Truncated }}|
{{ end }}
{{ end }}
`
b := hugolib.Test(t, files)
b.AssertFileContentExact("public/p1/index.html",
"Content: <p>This is summary.</p>\n<p>This is content.</p>",
"ContentWithoutSummary: <p>This is content.</p>|",
"WordCount: 6|",
"FuzzyWordCount: 100|",
"Summary: <p>This is summary.</p>|",
"Summary Type: manual|",
"Summary Truncated: true|",
)
b.AssertFileContent("public/p2/index.html",
"Summary: <p>This is some content about a summary and more.</p>|",
"WordCount: 13|",
"FuzzyWordCount: 100|",
"Summary Type: auto",
"Summary Truncated: true",
)
b.AssertFileContentExact("public/p3/index.html",
"Summary: This is summary in front matter.|",
"ContentWithoutSummary: <p>This is content.</p>\n|",
)
}
func TestPageMarkupWithoutSummaryRST(t *testing.T) {
t.Parallel()
if !rst.Supports() {
t.Skip("Skip RST test as not supported")
}
files := `
-- hugo.toml --
summaryLength=5
[security.exec]
allow = ["rst", "python"]
-- content/p1.rst --
This is a story about a summary and more.
Another paragraph.
-- content/p2.rst --
This is summary.
<!--more-->
This is content.
-- layouts/single.html --
Single.
Page.Summary: {{ .Summary }}|
{{ with .Markup.Render }}
Content: {{ .Content }}|
ContentWithoutSummary: {{ .ContentWithoutSummary }}|
{{ with .Summary }}
Summary: {{ . }}|
Summary Type: {{ .Type }}|
Summary Truncated: {{ .Truncated }}|
{{ end }}
{{ end }}
`
b := hugolib.Test(t, files)
// Auto summary.
b.AssertFileContentExact("public/p1/index.html",
"Content: <div class=\"document\">\n\n\n<p>This is a story about a summary and more.</p>\n<p>Another paragraph.</p>\n</div>|",
"Summary: <div class=\"document\">\n\n\n<p>This is a story about a summary and more.</p></div>|\nSummary Type: auto|\nSummary Truncated: true|",
"ContentWithoutSummary: <div class=\"document\">\n<p>Another paragraph.</p>\n</div>|",
)
// Manual summary.
b.AssertFileContentExact("public/p2/index.html",
"Content: <div class=\"document\">\n\n\n<p>This is summary.</p>\n<p>This is content.</p>\n</div>|",
"ContentWithoutSummary: <div class=\"document\"><p>This is content.</p>\n</div>|",
"Summary: <div class=\"document\">\n\n\n<p>This is summary.</p>\n</div>|\nSummary Type: manual|\nSummary Truncated: true|",
)
}
func TestPageMarkupWithoutSummaryAsciiDoc(t *testing.T) {
t.Parallel()
if ok, err := asciidocext.Supports(); !ok {
t.Skip(err)
}
files := `
-- hugo.toml --
summaryLength=5
[security.exec]
allow = ["asciidoc", "python"]
-- content/p1.ad --
This is a story about a summary and more.
Another paragraph.
-- content/p2.ad --
This is summary.
<!--more-->
This is content.
-- layouts/single.html --
Single.
Page.Summary: {{ .Summary }}|
{{ with .Markup.Render }}
Content: {{ .Content }}|
ContentWithoutSummary: {{ .ContentWithoutSummary }}|
{{ with .Summary }}
Summary: {{ . }}|
Summary Type: {{ .Type }}|
Summary Truncated: {{ .Truncated }}|
{{ end }}
{{ end }}
`
b := hugolib.Test(t, files)
// Auto summary.
b.AssertFileContentExact("public/p1/index.html",
"Content: <div class=\"paragraph\">\n<p>This is a story about a summary and more.</p>\n</div>\n<div class=\"paragraph\">\n<p>Another paragraph.</p>\n</div>\n|",
"Summary: <div class=\"paragraph\">\n<p>This is a story about a summary and more.</p>\n</div>|",
"Summary Type: auto|\nSummary Truncated: true|",
"ContentWithoutSummary: <div class=\"paragraph\">\n<p>Another paragraph.</p>\n</div>|",
)
// Manual summary.
b.AssertFileContentExact("public/p2/index.html",
"Content: <div class=\"paragraph\">\n<p>This is summary.</p>\n</div>\n<div class=\"paragraph\">\n<p>This is content.</p>\n</div>|",
"ContentWithoutSummary: <div class=\"paragraph\">\n<p>This is content.</p>\n</div>|",
"Summary: <div class=\"paragraph\">\n<p>This is summary.</p>\n</div>|\nSummary Type: manual|\nSummary Truncated: true|",
)
}
func TestIssue13967(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['home','rss','section','sitemap','taxonomy','term']
-- layouts/all.html --
Title: {{ .Title }}|Summary: {{ .Summary }}|Truncated: {{ .Truncated }}|
-- content/p1.md --
---
title: p1
---
<!--more--> one two three
-- content/p2.md --
---
title: p2
---
one <!--more--> two three
-- content/p3.md --
---
title: p3
---
one two <!--more--> three
-- content/p4.md --
---
title: p4
---
one two three <!--more-->
`
b := hugolib.Test(t, files)
b.AssertFileContent("public/p1/index.html", `Title: p1|Summary: |Truncated: true|`)
b.AssertFileContent("public/p2/index.html", `Title: p2|Summary: <p>one</p>|Truncated: true|`)
b.AssertFileContent("public/p3/index.html", `Title: p3|Summary: <p>one two</p>|Truncated: true|`)
b.AssertFileContent("public/p4/index.html", `Title: p4|Summary: <p>one two three</p>|Truncated: false|`)
}
func TestIssue13968(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
summaryLength = SUMMARY_LENGTH
-- layouts/all.html --
Title: {{ .Title }}|Summary: {{ .Summary }}|Truncated: {{ .Truncated }}|
-- content/_index.md --
---
title: home
---
one two three
`
tests := []struct {
summaryLength int
want string
}{
{0, "Title: home|Summary: |Truncated: true|"},
{1, "Title: home|Summary: <p>one two three</p>|Truncated: false|"},
{2, "Title: home|Summary: <p>one two three</p>|Truncated: false|"},
{3, "Title: home|Summary: <p>one two three</p>|Truncated: false|"},
{4, "Title: home|Summary: <p>one two three</p>|Truncated: false|"},
}
for _, tt := range tests {
f := strings.ReplaceAll(files, "SUMMARY_LENGTH", strconv.Itoa(tt.summaryLength))
b := hugolib.Test(t, f)
b.AssertFileContent("public/index.html", tt.want)
}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagination_test.go | resources/page/pagination_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"fmt"
"testing"
qt "github.com/frankban/quicktest"
)
// TestSplitPages verifies that 21 pages split into chunks of 5 yields
// four full chunks and one trailing chunk of a single page.
func TestSplitPages(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	all := createTestPages(21)
	got := splitPages(all, 5)
	c.Assert(len(got), qt.Equals, 5)
	// Every chunk but the last is full; the last holds the remainder.
	for i, chunk := range got {
		want := 5
		if i == len(got)-1 {
			want = 1
		}
		c.Assert(chunk.Len(), qt.Equals, want)
	}
}
// TestSplitPageGroups verifies chunking of grouped pages: 21 pages grouped
// by weight (descending) and split into chunks of 5. The test pages
// alternate weights, so the first chunk holds only weight-10 pages and the
// final chunk holds the weight-5 remainder.
func TestSplitPageGroups(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createTestPages(21)
	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")
	chunks := splitPageGroups(groups, 5)
	c.Assert(len(chunks), qt.Equals, 5)
	firstChunk := chunks[0]
	// alternate weight 5 and 10
	if groups, ok := firstChunk.(PagesGroup); ok {
		c.Assert(groups.Len(), qt.Equals, 5)
		for _, pg := range groups {
			// first group 10 in weight
			c.Assert(pg.Key, qt.Equals, 10)
			for _, p := range pg.Pages {
				c.Assert(p.FuzzyWordCount(context.Background())%2 == 0, qt.Equals, true) // magic test
			}
		}
	} else {
		// Was "Excepted PageGroup": misspelled and named the wrong type.
		t.Fatal("Expected PagesGroup")
	}
	lastChunk := chunks[4]
	if groups, ok := lastChunk.(PagesGroup); ok {
		c.Assert(groups.Len(), qt.Equals, 1)
		for _, pg := range groups {
			// last should have 5 in weight
			c.Assert(pg.Key, qt.Equals, 5)
			for _, p := range pg.Pages {
				c.Assert(p.FuzzyWordCount(context.Background())%2 != 0, qt.Equals, true) // magic test
			}
		}
	} else {
		t.Fatal("Expected PagesGroup")
	}
}
// TestPager covers paginator construction from flat pages and from page
// groups, including rejection of a negative page size.
func TestPager(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createTestPages(21)
	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")
	makeURL := func(n int) string {
		return fmt.Sprintf("page/%d/", n)
	}
	// Negative page sizes are invalid for both constructors.
	_, err := newPaginatorFromPages(pages, -1, makeURL)
	c.Assert(err, qt.Not(qt.IsNil))
	_, err = newPaginatorFromPageGroups(groups, -1, makeURL)
	c.Assert(err, qt.Not(qt.IsNil))
	// Flat pages: Pages() is populated and PageGroups() is empty.
	pag, err := newPaginatorFromPages(pages, 5, makeURL)
	c.Assert(err, qt.IsNil)
	doTestPages(t, pag)
	first := pag.Pagers()[0].First()
	c.Assert(first.String(), qt.Equals, "Pager 1")
	c.Assert(first.Pages(), qt.Not(qt.HasLen), 0)
	c.Assert(first.PageGroups(), qt.HasLen, 0)
	// Page groups: the inverse holds.
	pag, err = newPaginatorFromPageGroups(groups, 5, makeURL)
	c.Assert(err, qt.IsNil)
	doTestPages(t, pag)
	first = pag.Pagers()[0].First()
	c.Assert(first.PageGroups(), qt.Not(qt.HasLen), 0)
	c.Assert(first.Pages(), qt.HasLen, 0)
}
// doTestPages asserts the shared pager invariants for a paginator holding
// 21 elements at page size 5.
func doTestPages(t *testing.T, paginator *Paginator) {
	c := qt.New(t)
	pagers := paginator.Pagers()
	c.Assert(len(pagers), qt.Equals, 5)
	c.Assert(paginator.TotalNumberOfElements(), qt.Equals, 21)
	c.Assert(paginator.PagerSize(), qt.Equals, 5)
	c.Assert(paginator.TotalPages(), qt.Equals, 5)
	// First pager: full, has a successor, no predecessor.
	first := pagers[0]
	c.Assert(first.URL(), qt.Equals, "page/1/")
	c.Assert(first.First(), qt.Equals, first)
	c.Assert(first.HasNext(), qt.Equals, true)
	c.Assert(first.Next(), qt.Equals, pagers[1])
	c.Assert(first.HasPrev(), qt.Equals, false)
	c.Assert(first.Prev(), qt.IsNil)
	c.Assert(first.NumberOfElements(), qt.Equals, 5)
	c.Assert(first.PageNumber(), qt.Equals, 1)
	// A middle pager has neighbors on both sides.
	middle := pagers[2]
	c.Assert(middle.HasNext(), qt.Equals, true)
	c.Assert(middle.HasPrev(), qt.Equals, true)
	c.Assert(middle.Prev(), qt.Equals, pagers[1])
	// Last pager: partial (21 % 5 == 1), no successor.
	last := pagers[4]
	c.Assert(last.URL(), qt.Equals, "page/5/")
	c.Assert(last.Last(), qt.Equals, last)
	c.Assert(last.HasNext(), qt.Equals, false)
	c.Assert(last.Next(), qt.IsNil)
	c.Assert(last.HasPrev(), qt.Equals, true)
	c.Assert(last.NumberOfElements(), qt.Equals, 1)
	c.Assert(last.PageNumber(), qt.Equals, 5)
}
// TestPagerNoPages verifies pagination over empty page sets for both flat
// pages and page groups.
func TestPagerNoPages(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createTestPages(0)
	groups, _ := pages.GroupBy(context.Background(), "Weight", "desc")
	makeURL := func(n int) string {
		return fmt.Sprintf("page/%d/", n)
	}
	// Flat pages: both accessors are empty on the single pager.
	pag, _ := newPaginatorFromPages(pages, 5, makeURL)
	doTestPagerNoPages(t, pag)
	first := pag.Pagers()[0].First()
	c.Assert(first.PageGroups(), qt.HasLen, 0)
	c.Assert(first.Pages(), qt.HasLen, 0)
	// Page groups: same result.
	pag, _ = newPaginatorFromPageGroups(groups, 5, makeURL)
	doTestPagerNoPages(t, pag)
	first = pag.Pagers()[0].First()
	c.Assert(first.PageGroups(), qt.HasLen, 0)
	c.Assert(first.Pages(), qt.HasLen, 0)
}
// doTestPagerNoPages asserts the invariants of an empty paginator: exactly
// one pager that holds no elements.
func doTestPagerNoPages(t *testing.T, paginator *Paginator) {
	c := qt.New(t)
	pagers := paginator.Pagers()
	c.Assert(len(pagers), qt.Equals, 1)
	c.Assert(paginator.TotalNumberOfElements(), qt.Equals, 0)
	c.Assert(paginator.PagerSize(), qt.Equals, 5)
	c.Assert(paginator.TotalPages(), qt.Equals, 0)
	// The single pager is first, last and empty all at once.
	only := pagers[0]
	c.Assert(only.First(), qt.Not(qt.IsNil))
	c.Assert(only.HasNext(), qt.Equals, false)
	c.Assert(only.HasPrev(), qt.Equals, false)
	c.Assert(only.Next(), qt.IsNil)
	c.Assert(len(only.Pagers()), qt.Equals, 1)
	c.Assert(only.Pages().Len(), qt.Equals, 0)
	c.Assert(only.NumberOfElements(), qt.Equals, 0)
	c.Assert(only.TotalNumberOfElements(), qt.Equals, 0)
	c.Assert(only.TotalPages(), qt.Equals, 0)
	c.Assert(only.PageNumber(), qt.Equals, 1)
	c.Assert(only.PagerSize(), qt.Equals, 5)
}
// TestProbablyEqualPageLists checks the heuristic page-list comparison:
// non-list values are considered "probably equal"; lists compare by kind
// and length.
func TestProbablyEqualPageLists(t *testing.T) {
	t.Parallel()
	fivePages := createTestPages(5)
	zeroPages := createTestPages(0)
	zeroPagesByWeight, _ := createTestPages(0).GroupBy(context.Background(), "Weight", "asc")
	fivePagesByWeight, _ := createTestPages(5).GroupBy(context.Background(), "Weight", "asc")
	ninePagesByWeight, _ := createTestPages(9).GroupBy(context.Background(), "Weight", "asc")
	for i, tt := range []struct {
		v1     any
		v2     any
		expect bool
	}{
		{nil, nil, true},
		{"a", "b", true},
		{"a", fivePages, false},
		{fivePages, "a", false},
		{fivePages, createTestPages(2), false},
		{fivePages, fivePages, true},
		{zeroPages, zeroPages, true},
		{fivePagesByWeight, fivePagesByWeight, true},
		{zeroPagesByWeight, fivePagesByWeight, false},
		{zeroPagesByWeight, zeroPagesByWeight, true},
		{fivePagesByWeight, fivePages, false},
		{fivePagesByWeight, ninePagesByWeight, false},
	} {
		if got := probablyEqualPageLists(tt.v1, tt.v2); got != tt.expect {
			t.Errorf("[%d] got %t but expected %t", i, got, tt.expect)
		}
	}
}
// TestPaginationPage tests the pager's page() accessor for paginators built
// from flat pages and from page groups, both within and beyond the pager's
// bounds.
func TestPaginationPage(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	urlFactory := func(page int) string {
		return fmt.Sprintf("page/%d/", page)
	}
	// Seven pages; the former locals were misleadingly named fivePages.
	sevenPages := createTestPages(7)
	sevenPagesByWordCount, _ := createTestPages(7).GroupBy(context.Background(), "FuzzyWordCount", "asc")
	p1, _ := newPaginatorFromPages(sevenPages, 2, urlFactory)
	p2, _ := newPaginatorFromPageGroups(sevenPagesByWordCount, 2, urlFactory)
	f1 := p1.pagers[0].First()
	f2 := p2.pagers[0].First()
	page11, _ := f1.page(1)
	page1Nil, _ := f1.page(3) // index 3 is beyond the first pager's two elements
	page21, _ := f2.page(1)
	page2Nil, _ := f2.page(3)
	c.Assert(page11.FuzzyWordCount(context.Background()), qt.Equals, 3)
	c.Assert(page1Nil, qt.IsNil)
	c.Assert(page21, qt.Not(qt.IsNil))
	c.Assert(page21.FuzzyWordCount(context.Background()), qt.Equals, 3)
	c.Assert(page2Nil, qt.IsNil)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_sort_test.go | resources/page/pages_sort_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"fmt"
"testing"
"time"
"github.com/gohugoio/hugo/resources/resource"
"github.com/google/go-cmp/cmp"
qt "github.com/frankban/quicktest"
)
// eq is a quicktest checker that compares testPage values by path and
// weight only, ignoring all other fields.
var eq = qt.CmpEquals(
	cmp.Comparer(func(p1, p2 testPage) bool {
		return p1.path == p2.path && p1.weight == p2.weight
	}),
)
// TestDefaultSort verifies SortByDefault's ordering chain: weight first
// (non-zero weights sort before zero), then date (newest first), then
// link title (ascending).
func TestDefaultSort(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	// Four timestamps, each an hour older than the previous.
	d1 := time.Now()
	d2 := d1.Add(-1 * time.Hour)
	d3 := d1.Add(-2 * time.Hour)
	d4 := d1.Add(-3 * time.Hour)
	p := createSortTestPages(4)
	// first by weight
	setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "c", "d"}, [4]int{4, 3, 2, 1}, p)
	SortByDefault(p)
	c.Assert(p[0].Weight(), qt.Equals, 1)
	// Consider zero weight, issue #2673
	setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "a", "d", "c"}, [4]int{0, 0, 0, 1}, p)
	SortByDefault(p)
	c.Assert(p[0].Weight(), qt.Equals, 1)
	// next by date
	setSortVals([4]time.Time{d3, d4, d1, d2}, [4]string{"a", "b", "c", "d"}, [4]int{1, 1, 1, 1}, p)
	SortByDefault(p)
	c.Assert(p[0].Date(), qt.Equals, d1)
	// finally by link title
	setSortVals([4]time.Time{d3, d3, d3, d3}, [4]string{"b", "c", "a", "d"}, [4]int{1, 1, 1, 1}, p)
	SortByDefault(p)
	// setSortVals mirrors titles into link titles with an "l" suffix.
	c.Assert(p[0].LinkTitle(), qt.Equals, "al")
	c.Assert(p[1].LinkTitle(), qt.Equals, "bl")
	c.Assert(p[2].LinkTitle(), qt.Equals, "cl")
}
// https://github.com/gohugoio/hugo/issues/4953
func TestSortByLinkTitle(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createSortTestPages(6)
	// Pages 0-4 get a title, pages 3-5 additionally get a link title.
	for i, p := range pages {
		tp := p.(*testPage)
		if i < 5 {
			tp.title = fmt.Sprintf("title%d", i)
		}
		if i > 2 {
			tp.linkTitle = fmt.Sprintf("linkTitle%d", i)
		}
	}
	pages.shuffle()
	// Link titles sort ahead of plain titles ("linkTitle…" < "title…").
	for i, p := range pages.ByLinkTitle() {
		want := fmt.Sprintf("title%d", i-3)
		if i < 3 {
			want = fmt.Sprintf("linkTitle%d", i+3)
		}
		c.Assert(p.LinkTitle(), qt.Equals, want)
	}
}
// TestSortByN runs each ByXxx sort once against the same four pages and
// spot-checks one element of each result.
func TestSortByN(t *testing.T) {
	t.Parallel()
	d1 := time.Now()
	d2 := d1.Add(-2 * time.Hour)
	d3 := d1.Add(-10 * time.Hour)
	d4 := d1.Add(-20 * time.Hour)
	p := createSortTestPages(4)
	ctx := context.Background()
	// ByLength takes a context, so wrap it to match the other sort funcs.
	byLen := func(p Pages) Pages {
		return p.ByLength(ctx)
	}
	for i, this := range []struct {
		sortFunc   func(p Pages) Pages
		assertFunc func(p Pages) bool
	}{
		{(Pages).ByWeight, func(p Pages) bool { return p[0].Weight() == 1 }},
		{(Pages).ByTitle, func(p Pages) bool { return p[0].Title() == "ab" }},
		{(Pages).ByLinkTitle, func(p Pages) bool { return p[0].LinkTitle() == "abl" }},
		{(Pages).ByDate, func(p Pages) bool { return p[0].Date() == d4 }},
		{(Pages).ByPublishDate, func(p Pages) bool { return p[0].PublishDate() == d4 }},
		{(Pages).ByExpiryDate, func(p Pages) bool { return p[0].ExpiryDate() == d4 }},
		{(Pages).ByLastmod, func(p Pages) bool { return p[1].Lastmod() == d3 }},
		// NOTE(review): setSortVals blanks every page's content at the end,
		// so this check appears trivially true — confirm intent.
		{byLen, func(p Pages) bool { return p[0].(resource.LengthProvider).Len(ctx) == len(p[0].(*testPage).content) }},
	} {
		// Reset the sort values before each sort; they share the slice p.
		setSortVals([4]time.Time{d1, d2, d3, d4}, [4]string{"b", "ab", "cde", "fg"}, [4]int{0, 3, 2, 1}, p)
		sorted := this.sortFunc(p)
		if !this.assertFunc(sorted) {
			t.Errorf("[%d] sort error", i)
		}
	}
}
// TestLimit verifies that Limit truncates to n pages and that limits at or
// beyond the length return the full list.
func TestLimit(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createSortTestPages(10)
	head := pages.Limit(5)
	c.Assert(len(head), qt.Equals, 5)
	for i, p := range head {
		c.Assert(p, qt.Equals, pages[i])
	}
	c.Assert(pages.Limit(10), eq, pages)
	c.Assert(pages.Limit(11), eq, pages)
}
// TestPageSortReverse verifies Reverse and its result caching.
func TestPageSortReverse(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	orig := createSortTestPages(10)
	// createSortTestPages assigns fuzzyWordCount == index.
	c.Assert(orig[0].(*testPage).fuzzyWordCount, qt.Equals, 0)
	c.Assert(orig[9].(*testPage).fuzzyWordCount, qt.Equals, 9)
	rev := orig.Reverse()
	c.Assert(rev[0].(*testPage).fuzzyWordCount, qt.Equals, 9)
	c.Assert(rev[9].(*testPage).fuzzyWordCount, qt.Equals, 0)
	// cached
	c.Assert(pagesEqual(rev, orig.Reverse()), qt.Equals, true)
}
// TestPageSortByParam sorts by a nested, dotted param key holding string
// values and verifies lexical ascending order, with the page missing the
// param sorted last.
func TestPageSortByParam(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	var k any = "arbitrarily.nested"
	unsorted := createSortTestPages(10)
	// Strip the param from the final page so it has no sortable value.
	delete(unsorted[9].Params(), "arbitrarily")
	firstSetValue, _ := unsorted[0].Param(k)
	secondSetValue, _ := unsorted[1].Param(k)
	lastSetValue, _ := unsorted[8].Param(k)
	unsetValue, _ := unsorted[9].Param(k)
	// createSortTestPages assigns "xyz" + (100-i) as the param value.
	c.Assert(firstSetValue, qt.Equals, "xyz100")
	c.Assert(secondSetValue, qt.Equals, "xyz99")
	c.Assert(lastSetValue, qt.Equals, "xyz92")
	c.Assert(unsetValue, qt.Equals, nil)
	sorted := unsorted.ByParam("arbitrarily.nested")
	firstSetSortedValue, _ := sorted[0].Param(k)
	secondSetSortedValue, _ := sorted[1].Param(k)
	lastSetSortedValue, _ := sorted[8].Param(k)
	unsetSortedValue, _ := sorted[9].Param(k)
	// Lexical ascending: "xyz100" < "xyz92" < … < "xyz99".
	c.Assert(firstSetSortedValue, qt.Equals, firstSetValue)
	c.Assert(lastSetSortedValue, qt.Equals, secondSetValue)
	c.Assert(secondSetSortedValue, qt.Equals, lastSetValue)
	c.Assert(unsetSortedValue, qt.Equals, unsetValue)
}
// TestPageSortByParamNumeric sorts by a nested param holding int values and
// verifies ascending numeric order, with the page missing the param sorted
// last.
func TestPageSortByParamNumeric(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	var k any = "arbitrarily.nested"
	n := 10
	unsorted := createSortTestPages(n)
	for i := range n {
		// Every value is the int 100 - i. (A previous version assigned
		// 100.0 - i for even i, but that untyped-constant expression is
		// still int arithmetic, so the branch was dead and was removed.)
		v := 100 - i
		unsorted[i].(*testPage).params = map[string]any{
			"arbitrarily": map[string]any{
				"nested": v,
			},
		}
	}
	// Strip the param from the final page so it has no sortable value.
	delete(unsorted[9].Params(), "arbitrarily")
	firstSetValue, _ := unsorted[0].Param(k)
	secondSetValue, _ := unsorted[1].Param(k)
	lastSetValue, _ := unsorted[8].Param(k)
	unsetValue, _ := unsorted[9].Param(k)
	c.Assert(firstSetValue, qt.Equals, 100)
	c.Assert(secondSetValue, qt.Equals, 99)
	c.Assert(lastSetValue, qt.Equals, 92)
	c.Assert(unsetValue, qt.Equals, nil)
	sorted := unsorted.ByParam("arbitrarily.nested")
	firstSetSortedValue, _ := sorted[0].Param(k)
	secondSetSortedValue, _ := sorted[1].Param(k)
	lastSetSortedValue, _ := sorted[8].Param(k)
	unsetSortedValue, _ := sorted[9].Param(k)
	c.Assert(firstSetSortedValue, qt.Equals, 92)
	c.Assert(secondSetSortedValue, qt.Equals, 93)
	c.Assert(lastSetSortedValue, qt.Equals, 100)
	c.Assert(unsetSortedValue, qt.Equals, unsetValue)
}
// BenchmarkSortByWeightAndReverse measures a weight sort followed by a
// reverse on 300 pages per iteration.
func BenchmarkSortByWeightAndReverse(b *testing.B) {
	p := createSortTestPages(300)
	for b.Loop() {
		p = p.ByWeight().Reverse()
	}
}
// setSortVals assigns dates, titles and weights to the four pages. The
// derived fields (link title, publish/expiry dates, content) are assigned
// "mirrored" onto the page at the opposite index, so sorts on derived
// fields can be distinguished from sorts on the primary fields.
func setSortVals(dates [4]time.Time, titles [4]string, weights [4]int, pages Pages) {
	for i := range dates {
		this := pages[i].(*testPage)
		other := pages[len(dates)-1-i].(*testPage)
		this.date = dates[i]
		this.lastMod = dates[i]
		this.weight = weights[i]
		this.title = titles[i]
		// make sure we compare apples and ... apples ...
		other.linkTitle = this.Title() + "l"
		other.pubDate = dates[i]
		other.expiryDate = dates[i]
		other.content = titles[i] + "_content"
	}
	// Swap lastMod between pages 1 and 2 so ByLastmod orders differently
	// from ByDate.
	lastLastMod := pages[2].Lastmod()
	pages[2].(*testPage).lastMod = pages[1].Lastmod()
	pages[1].(*testPage).lastMod = lastLastMod
	// NOTE(review): this wipe makes the content assignment above dead;
	// confirm whether ByLength is still meant to see distinct lengths.
	for _, p := range pages {
		p.(*testPage).content = ""
	}
}
// createSortTestPages builds num test pages with deterministic paths,
// titles, params ("arbitrarily.nested" = "xyz"+(100-i)), alternating
// weights (even index: 10, odd: 5) and fuzzyWordCount == index.
func createSortTestPages(num int) Pages {
	pages := make(Pages, num)
	for i := range num {
		p := newTestPage()
		p.path = fmt.Sprintf("/x/y/p%d.md", i)
		p.title = fmt.Sprintf("Title %d", i%((num+1)/2))
		p.params = map[string]any{
			"arbitrarily": map[string]any{
				"nested": fmt.Sprintf("xyz%v", 100-i),
			},
		}
		weight := 10
		if i%2 != 0 {
			weight = 5
		}
		p.weight = weight
		p.fuzzyWordCount = i
		p.description = "initial"
		pages[i] = p
	}
	return pages
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page.go | resources/page/page.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package page contains the core interfaces and types for the Page resource,
// a core component in Hugo.
package page
import (
"context"
"fmt"
"html/template"
"github.com/gohugoio/hugo/hugolib/roles"
"github.com/gohugoio/hugo/hugolib/sitesmatrix"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/tableofcontents"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/common/hstore"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/related"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/source"
)
// Clear clears any global package state.
// It currently resets the package-level spc cache; it always returns nil.
func Clear() error {
	spc.clear()
	return nil
}
// AlternativeOutputFormatsProvider provides alternative output formats for a
// Page.
type AlternativeOutputFormatsProvider interface {
	// AlternativeOutputFormats gives the alternative output formats for the
	// current output.
	// Note that we use the term "alternative" and not "alternate" here, as it
	// does not necessarily replace the other format, it is an alternative representation.
	AlternativeOutputFormats() OutputFormats
}
// ChildCareProvider provides accessors to child resources.
type ChildCareProvider interface {
	// Pages returns a list of pages of all kinds.
	Pages() Pages
	// RegularPages returns a list of pages of kind 'Page'.
	RegularPages() Pages
	// RegularPagesRecursive returns all regular pages below the current
	// section.
	RegularPagesRecursive() Pages
	resource.ResourcesProvider
}
// MarkupProvider provides the Markup accessor for a Page.
type MarkupProvider interface {
	// Markup returns the page's Markup. opts are optional,
	// implementation-defined options.
	Markup(opts ...any) Markup
}
// ContentProvider provides the content related values for a Page.
type ContentProvider interface {
	// Content returns the content of the page.
	Content(context.Context) (any, error)
	// ContentWithoutSummary returns the Page Content stripped of the summary.
	ContentWithoutSummary(ctx context.Context) (template.HTML, error)
	// Plain returns the Page Content stripped of HTML markup.
	Plain(context.Context) string
	// PlainWords returns a string slice from splitting Plain using https://pkg.go.dev/strings#Fields.
	PlainWords(context.Context) []string
	// Summary returns a generated summary of the content.
	// The breakpoint can be set manually by inserting a summary separator in the source file.
	Summary(context.Context) template.HTML
	// Truncated returns whether the Summary is truncated or not.
	Truncated(context.Context) bool
	// FuzzyWordCount returns the approximate number of words in the content.
	FuzzyWordCount(context.Context) int
	// WordCount returns the number of words in the content.
	WordCount(context.Context) int
	// ReadingTime returns the reading time based on the length of plain text.
	ReadingTime(context.Context) int
	// Len returns the length of the content.
	// This is for internal use only.
	Len(context.Context) int
}
// ContentRenderer provides the content rendering methods for some content.
type ContentRenderer interface {
	// ParseAndRenderContent renders the given content.
	// For internal use only.
	ParseAndRenderContent(ctx context.Context, content []byte, enableTOC bool) (converter.ResultRender, error)
	// For internal use only.
	ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error)
	// For internal use only.
	RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error)
}
// FileProvider provides the source file.
type FileProvider interface {
	// File returns the source file for this Page,
	// or a zero File if this Page is not backed by a file.
	File() *source.File
}
// GetPageProvider provides the GetPage method.
type GetPageProvider interface {
	// GetPage looks up a page for the given ref.
	// {{ with .GetPage "blog" }}{{ .Title }}{{ end }}
	//
	// This will return nil when no page could be found, and will return
	// an error if the ref is ambiguous.
	GetPage(ref string) (Page, error)
}
// GitInfoProvider provides Git info.
type GitInfoProvider interface {
	// GitInfo returns the Git info for this object.
	GitInfo() *source.GitInfo
	// CodeOwners returns the code owners for this object.
	CodeOwners() []string
}
// InSectionPositioner provides section navigation.
type InSectionPositioner interface {
	// NextInSection returns the next page in the same section.
	NextInSection() Page
	// PrevInSection returns the previous page in the same section.
	PrevInSection() Page
}
// RelatedDocsHandlerProvider is considered an internal interface.
type RelatedDocsHandlerProvider interface {
	// GetInternalRelatedDocsHandler is for internal use only.
	GetInternalRelatedDocsHandler() *RelatedDocsHandler
}
// OutputFormatsProvider provides the OutputFormats of a Page.
type OutputFormatsProvider interface {
	// OutputFormats returns the OutputFormats for this Page.
	OutputFormats() OutputFormats
}
// PageProvider provides access to a Page.
// Implemented by shortcodes and others.
type PageProvider interface {
	// Page returns the associated Page.
	Page() Page
}
// Page is the core interface in Hugo and what you get as the top level data context in your templates.
type Page interface {
	MarkupProvider
	ContentProvider
	TableOfContentsProvider
	PageWithoutContent
	fmt.Stringer
}
// PageFragment provides the links, name and title of a fragment of a page.
type PageFragment interface {
	resource.ResourceLinksProvider
	resource.ResourceNameTitleProvider
}
// PageMetaResource is a Resource that also provides page metadata.
type PageMetaResource interface {
	PageMetaProvider
	resource.Resource
}
// PageMetaLanguageResource is a PageMetaResource that also provides its language.
type PageMetaLanguageResource interface {
	PageMetaResource
	resource.LanguageProvider
}
// PageMetaProvider provides page metadata, typically provided via front matter.
type PageMetaProvider interface {
	// The 4 page dates
	resource.Dated
	// Aliases forms the base for redirects generation.
	Aliases() []string
	// BundleType returns the bundle type: `leaf`, `branch` or an empty string.
	BundleType() string
	// A configured description.
	Description() string
	// Whether this is a draft. Will only be true if run with the --buildDrafts (-D) flag.
	Draft() bool
	// IsHome returns whether this is the home page.
	IsHome() bool
	// Configured keywords.
	Keywords() []string
	// The Page Kind. One of page, home, section, taxonomy, term.
	Kind() string
	// The configured layout to use to render this page. Typically set in front matter.
	Layout() string
	// The title used for links.
	LinkTitle() string
	// IsNode returns whether this is an item of one of the list types in Hugo,
	// i.e. not a regular content page.
	IsNode() bool
	// IsPage returns whether this is a regular content page.
	IsPage() bool
	// Path gets the relative path, including file name and extension if relevant,
	// to the source of this Page. It will be relative to any content root.
	Path() string
	// The slug, typically defined in front matter.
	Slug() string
	// IsSection returns whether this is a section page.
	IsSection() bool
	// Section returns the first path element below the content root.
	Section() string
	// Sitemap returns the sitemap configuration for this page.
	// This is for internal use only.
	Sitemap() config.SitemapConfig
	// Type is a discriminator used to select layouts etc. It is typically set
	// in front matter, but will fall back to the root section.
	Type() string
	// The configured weight, used as the first sort value in the default
	// page sort if non-zero.
	Weight() int
}
// NamedPageMetaValue returns a named metadata value from a PageMetaResource.
// This is currently only used to generate keywords for related content.
// If nameLower is not one of the metadata interface methods, we
// look in Params.
//
// It returns the value, whether a usable value was found, and any error.
// Note that when the Params lookup yields a nil value, (nil, false, nil)
// is returned and any lookup error is discarded.
func NamedPageMetaValue(p PageMetaLanguageResource, nameLower string) (any, bool, error) {
	var (
		v   any
		err error
	)
	// nameLower is expected to already be lowercase; each case maps
	// directly to an accessor on the metadata interfaces.
	switch nameLower {
	case "kind":
		v = p.Kind()
	case "bundletype":
		v = p.BundleType()
	case "mediatype":
		v = p.MediaType()
	case "section":
		v = p.Section()
	case "lang":
		v = p.Lang()
	case "aliases":
		v = p.Aliases()
	case "name":
		v = p.Name()
	case "keywords":
		v = p.Keywords()
	case "description":
		v = p.Description()
	case "title":
		v = p.Title()
	case "linktitle":
		v = p.LinkTitle()
	case "slug":
		v = p.Slug()
	case "date":
		v = p.Date()
	case "publishdate":
		v = p.PublishDate()
	case "expirydate":
		v = p.ExpiryDate()
	case "lastmod":
		v = p.Lastmod()
	case "draft":
		v = p.Draft()
	case "type":
		v = p.Type()
	case "layout":
		v = p.Layout()
	case "weight":
		v = p.Weight()
	default:
		// Try params.
		v, err = resource.Param(p, nil, nameLower)
		if v == nil {
			return nil, false, nil
		}
	}
	return v, err == nil, err
}
// PageMetaInternalProvider provides internal page metadata.
type PageMetaInternalProvider interface {
	// PathInfo returns the parsed path for this page.
	// This is for internal use only.
	PathInfo() *paths.Path
}
// PageRenderProvider provides a way for a Page to render content.
type PageRenderProvider interface {
	// Render renders the given layout with this Page as context.
	Render(ctx context.Context, layout ...string) (template.HTML, error)
	// RenderString renders the first value in args with the content renderer defined
	// for this Page.
	// It takes an optional map as a second argument:
	//
	// display (“inline”):
	// - inline or block. If inline (default), surrounding <p></p> on short snippets will be trimmed.
	// markup (defaults to the Page’s markup)
	RenderString(ctx context.Context, args ...any) (template.HTML, error)
}
// PageWithoutContent is the Page without any of the content methods.
type PageWithoutContent interface {
	RawContentProvider
	RenderShortcodesProvider
	resource.Resource
	PageMetaProvider
	// Param looks up a page parameter, falling back to site configuration.
	Param(key any) (any, error)
	PageMetaInternalProvider
	resource.LanguageProvider
	// For pages backed by a file.
	FileProvider
	GitInfoProvider
	// Output formats
	OutputFormatsProvider
	AlternativeOutputFormatsProvider
	// Tree navigation
	ChildCareProvider
	TreeProvider
	// Horizontal navigation
	InSectionPositioner
	PageRenderProvider
	PaginatorProvider
	Positioner
	navigation.PageMenusProvider
	// Page lookups/refs
	GetPageProvider
	RefProvider
	resource.TranslationKeyProvider
	TranslationsProvider
	SitesProvider
	// Helper methods
	ShortcodeInfoProvider
	compare.Eqer
	// Scratch returns a Scratch that can be used to store temporary state.
	// Note that this Scratch gets reset on server rebuilds. See Store() for a variant that survives.
	// Scratch returns a "scratch pad" that can be used to store state.
	// Deprecated: From Hugo v0.138.0 this is just an alias for Store.
	Scratch() *hstore.Scratch
	hstore.StoreProvider
	RelatedKeywordsProvider
	// GetTerms gets the terms of a given taxonomy,
	// e.g. GetTerms("categories")
	GetTerms(taxonomy string) Pages
	// HeadingsFiltered returns the headings for this page when a filter is set.
	// This is currently only triggered with the Related content feature
	// and the "fragments" type of index.
	HeadingsFiltered(context.Context) tableofcontents.Headings
}
// SiteDimensionProvider provides the role of a site/page.
type SiteDimensionProvider interface {
	// Role returns the configured role.
	Role() roles.Role
}
// Positioner provides next/prev navigation.
type Positioner interface {
	// Next points up to the next regular page (sorted by Hugo’s default sort).
	Next() Page
	// Prev points down to the previous regular page (sorted by Hugo’s default sort).
	Prev() Page
	// Deprecated: Use Prev. Will be removed in Hugo 0.57
	PrevPage() Page
	// Deprecated: Use Next. Will be removed in Hugo 0.57
	NextPage() Page
}
// RawContentProvider provides the raw, unprocessed content of the page.
type RawContentProvider interface {
	// RawContent returns the raw, unprocessed content of the page excluding any front matter.
	RawContent() string
}
// RenderShortcodesProvider provides shortcode rendering of the raw content.
type RenderShortcodesProvider interface {
	// RenderShortcodes returns RawContent with any shortcodes rendered.
	RenderShortcodes(context.Context) (template.HTML, error)
}
// RefProvider provides the methods needed to create reflinks to pages.
type RefProvider interface {
	// Ref returns an absolute URL to a page.
	Ref(argsm map[string]any) (string, error)
	// RefFrom is for internal use only.
	RefFrom(argsm map[string]any, source any) (string, error)
	// RelRef returns a relative URL to a page.
	RelRef(argsm map[string]any) (string, error)
	// RelRefFrom is for internal use only.
	RelRefFrom(argsm map[string]any, source any) (string, error)
}
// RelatedKeywordsProvider allows a Page to be indexed.
type RelatedKeywordsProvider interface {
	// Make it indexable as a related.Document
	// RelatedKeywords is meant for internal usage only.
	RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error)
}
// ShortcodeInfoProvider provides info about the shortcodes in a Page.
type ShortcodeInfoProvider interface {
	// HasShortcode returns whether the page has a shortcode with the given name.
	// This method is mainly motivated with the Hugo Docs site's need for a list
	// of pages with the `todo` shortcode in it.
	HasShortcode(name string) bool
}
// SitesProvider provides accessors to get sites.
type SitesProvider interface {
	// Site returns the current site.
	Site() Site
	// Sites returns all sites.
	Sites() Sites
}
// TableOfContentsProvider provides the table of contents for a Page.
type TableOfContentsProvider interface {
	// TableOfContents returns the table of contents for the page rendered as HTML.
	TableOfContents(context.Context) template.HTML
	// Fragments returns the table of contents fragments for this page.
	Fragments(context.Context) *tableofcontents.Fragments
}
// TranslationsProvider provides access to any translations.
type TranslationsProvider interface {
	// IsTranslated returns whether this content file is translated to
	// other language(s).
	IsTranslated() bool
	// AllTranslations returns all translations, including the current Page.
	AllTranslations() Pages
	// Translations returns the translations excluding the current Page.
	Translations() Pages
}
// TreeProvider provides section tree navigation.
type TreeProvider interface {
	// IsAncestor returns whether the current page is an ancestor of other.
	// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
	IsAncestor(other any) bool
	// CurrentSection returns the page's current section or the page itself if home or a section.
	// Note that this will return nil for pages that are not regular, home or section pages.
	CurrentSection() Page
	// IsDescendant returns whether the current page is a descendant of other.
	// Note that this method is not relevant for taxonomy lists and taxonomy terms pages.
	IsDescendant(other any) bool
	// FirstSection returns the section on level 1 below home, e.g. "/docs".
	// For the home page, this will return itself.
	FirstSection() Page
	// InSection returns whether other is in the current section.
	// Note that this will always return false for pages that are
	// not either regular, home or section pages.
	InSection(other any) bool
	// Parent returns a section's parent section or a page's section.
	// To get a section's subsections, see Page's Sections method.
	Parent() Page
	// Ancestors returns the ancestors of each page
	Ancestors() Pages
	// Sections returns this section's subsections, if any.
	// Note that for non-sections, this method will always return an empty list.
	Sections() Pages
	// Page returns a reference to the Page itself, kept here mostly
	// for legacy reasons.
	Page() Page
	// SectionsEntries returns the sections (directories if it's backed by a
	// file) leading to this Page.
	SectionsEntries() []string
	// SectionsPath is SectionsEntries joined with a /.
	SectionsPath() string
}
// SiteVectorProvider provides the dimensions of a Page.
type SiteVectorProvider interface {
	// SiteVector returns the page's vector in the sites matrix.
	SiteVector() sitesmatrix.Vector
}
// GetSiteVector returns the site vector for a Page.
// Pages that do not implement SiteVectorProvider yield the zero Vector.
func GetSiteVector(p Page) sitesmatrix.Vector {
	sp, ok := p.(SiteVectorProvider)
	if !ok {
		return sitesmatrix.Vector{}
	}
	return sp.SiteVector()
}
// PageWithContext is a Page with a context.Context.
// Its wrapper methods call the underlying context-aware Page methods with
// the stored Ctx, exposing a context-free API.
type PageWithContext struct {
	Page
	Ctx context.Context
}
// Content returns the page content using the stored context.
func (p PageWithContext) Content() (any, error) {
	return p.Page.Content(p.Ctx)
}
// Plain returns the content stripped of HTML markup, using the stored context.
func (p PageWithContext) Plain() string {
	return p.Page.Plain(p.Ctx)
}
// PlainWords returns the words of Plain, using the stored context.
func (p PageWithContext) PlainWords() []string {
	return p.Page.PlainWords(p.Ctx)
}
// Summary returns the content summary, using the stored context.
func (p PageWithContext) Summary() template.HTML {
	return p.Page.Summary(p.Ctx)
}
// Truncated reports whether the summary is truncated, using the stored context.
func (p PageWithContext) Truncated() bool {
	return p.Page.Truncated(p.Ctx)
}
// FuzzyWordCount returns the approximate word count, using the stored context.
func (p PageWithContext) FuzzyWordCount() int {
	return p.Page.FuzzyWordCount(p.Ctx)
}
// WordCount returns the word count, using the stored context.
func (p PageWithContext) WordCount() int {
	return p.Page.WordCount(p.Ctx)
}
// ReadingTime returns the reading time, using the stored context.
func (p PageWithContext) ReadingTime() int {
	return p.Page.ReadingTime(p.Ctx)
}
// Len returns the content length, using the stored context.
func (p PageWithContext) Len() int {
	return p.Page.Len(p.Ctx)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_prev_next.go | resources/page/pages_prev_next.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
// Next returns the next page relative to the given page.
// It returns nil when cur is not found in p or sits at index 0.
func (p Pages) Next(cur Page) Page {
	if i := searchPage(cur, p); i > 0 {
		return p[i-1]
	}
	return nil
}
// Prev returns the previous page relative to the given page.
// It returns nil when cur is not found in p or is the final element.
func (p Pages) Prev(cur Page) Page {
	i := searchPage(cur, p)
	if i == -1 || i >= len(p)-1 {
		return nil
	}
	return p[i+1]
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_nop.go | resources/page/page_nop.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package page contains the core interfaces and types for the Page resource,
// a core component in Hugo.
package page
import (
"bytes"
"context"
"html/template"
"time"
"github.com/gohugoio/hugo/hugolib/roles"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/tableofcontents"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/common/hstore"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/source"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/related"
"github.com/gohugoio/hugo/resources/resource"
)
var (
	// NopPage is a Page whose methods all return zero values.
	NopPage Page = new(nopPage)
	// NopContentRenderer is a ContentRenderer that renders nothing.
	NopContentRenderer ContentRenderer = new(nopContentRenderer)
	// NopMarkup is a Markup whose methods all return zero values.
	NopMarkup Markup = new(nopMarkup)
	// NopContent is a Content whose methods all return zero values.
	NopContent Content = new(nopContent)
	// NopCPageContentRenderer combines the no-op page with the no-op
	// content renderer in a single anonymous struct value.
	NopCPageContentRenderer = struct {
		OutputFormatPageContentProvider
		ContentRenderer
	}{
		NopPage,
		NopContentRenderer,
	}
	// NilPage is a typed nil *nopPage.
	NilPage *nopPage
)
// PageNop implements Page, but does nothing.
type nopPage int
func (p *nopPage) Aliases() []string {
return nil
}
func (p *nopPage) Sitemap() config.SitemapConfig {
return config.SitemapConfig{}
}
func (p *nopPage) Layout() string {
return ""
}
func (p *nopPage) AllTranslations() Pages {
return nil
}
func (p *nopPage) LanguagePrefix() string {
return ""
}
func (p *nopPage) AlternativeOutputFormats() OutputFormats {
return nil
}
func (p *nopPage) BaseFileName() string {
return ""
}
func (p *nopPage) BundleType() string {
return ""
}
func (p *nopPage) Markup(...any) Markup {
return NopMarkup
}
func (p *nopPage) Content(context.Context) (any, error) {
return "", nil
}
func (p *nopPage) ContentWithoutSummary(ctx context.Context) (template.HTML, error) {
return "", nil
}
func (p *nopPage) ContentBaseName() string {
return ""
}
func (p *nopPage) CurrentSection() Page {
return nil
}
func (p *nopPage) Data() any {
return nil
}
func (p *nopPage) Date() (t time.Time) {
return
}
func (p *nopPage) Description() string {
return ""
}
func (p *nopPage) RefFrom(argsm map[string]any, source any) (string, error) {
return "", nil
}
func (p *nopPage) RelRefFrom(argsm map[string]any, source any) (string, error) {
return "", nil
}
func (p *nopPage) Dir() string {
return ""
}
func (p *nopPage) Draft() bool {
return false
}
func (p *nopPage) Eq(other any) bool {
return p == other
}
func (p *nopPage) ExpiryDate() (t time.Time) {
return
}
func (p *nopPage) File() *source.File {
return nil
}
func (p *nopPage) FileInfo() hugofs.FileMetaInfo {
return nil
}
func (p *nopPage) Filename() string {
return ""
}
func (p *nopPage) FirstSection() Page {
return nil
}
func (p *nopPage) FuzzyWordCount(context.Context) int {
return 0
}
func (p *nopPage) GetPage(ref string) (Page, error) {
return nil, nil
}
func (p *nopPage) GetParam(key string) any {
return nil
}
func (p *nopPage) GetTerms(taxonomy string) Pages {
return nil
}
func (p *nopPage) GitInfo() *source.GitInfo {
return nil
}
func (p *nopPage) CodeOwners() []string {
return nil
}
func (p *nopPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
return false
}
func (p *nopPage) HasShortcode(name string) bool {
return false
}
func (p *nopPage) Hugo() (h hugo.HugoInfo) {
return
}
func (p *nopPage) InSection(other any) bool {
return false
}
func (p *nopPage) IsAncestor(other any) bool {
return false
}
func (p *nopPage) IsDescendant(other any) bool {
return false
}
func (p *nopPage) IsDraft() bool {
return false
}
func (p *nopPage) IsHome() bool {
return false
}
func (p *nopPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
return false
}
func (p *nopPage) IsNode() bool {
return false
}
func (p *nopPage) IsPage() bool {
return false
}
func (p *nopPage) IsSection() bool {
return false
}
func (p *nopPage) IsTranslated() bool {
return false
}
func (p *nopPage) Keywords() []string {
return nil
}
func (p *nopPage) Kind() string {
return ""
}
func (p *nopPage) Lang() string {
return ""
}
func (p *nopPage) Language() *langs.Language {
return nil
}
func (p *nopPage) Role() roles.Role {
return nil
}
func (p *nopPage) Lastmod() (t time.Time) {
return
}
func (p *nopPage) Len(context.Context) int {
return 0
}
func (p *nopPage) LinkTitle() string {
return ""
}
func (p *nopPage) LogicalName() string {
return ""
}
func (p *nopPage) MediaType() (m media.Type) {
return
}
func (p *nopPage) Menus() (m navigation.PageMenus) {
return
}
func (p *nopPage) Name() string {
return ""
}
func (p *nopPage) Next() Page {
return nil
}
func (p *nopPage) OutputFormats() OutputFormats {
return nil
}
func (p *nopPage) Pages() Pages {
return nil
}
func (p *nopPage) RegularPages() Pages {
return nil
}
func (p *nopPage) RegularPagesRecursive() Pages {
return nil
}
func (p *nopPage) Paginate(seq any, options ...any) (*Pager, error) {
return nil, nil
}
func (p *nopPage) Paginator(options ...any) (*Pager, error) {
return nil, nil
}
func (p *nopPage) Param(key any) (any, error) {
return nil, nil
}
func (p *nopPage) Params() maps.Params {
return nil
}
func (p *nopPage) Page() Page {
return p
}
func (p *nopPage) Parent() Page {
return nil
}
func (p *nopPage) Ancestors() Pages {
return nil
}
func (p *nopPage) Path() string {
return ""
}
func (p *nopPage) PathInfo() *paths.Path {
return nil
}
func (p *nopPage) Permalink() string {
return ""
}
func (p *nopPage) Plain(context.Context) string {
return ""
}
func (p *nopPage) PlainWords(context.Context) []string {
return nil
}
func (p *nopPage) Prev() Page {
return nil
}
func (p *nopPage) PublishDate() (t time.Time) {
return
}
func (p *nopPage) PrevInSection() Page {
return nil
}
func (p *nopPage) NextInSection() Page {
return nil
}
func (p *nopPage) PrevPage() Page {
return nil
}
func (p *nopPage) NextPage() Page {
return nil
}
func (p *nopPage) RawContent() string {
return ""
}
func (p *nopPage) RenderShortcodes(ctx context.Context) (template.HTML, error) {
return "", nil
}
func (p *nopPage) ReadingTime(context.Context) int {
return 0
}
func (p *nopPage) Ref(argsm map[string]any) (string, error) {
return "", nil
}
func (p *nopPage) RelPermalink() string {
return ""
}
func (p *nopPage) RelRef(argsm map[string]any) (string, error) {
return "", nil
}
func (p *nopPage) Render(ctx context.Context, layout ...string) (template.HTML, error) {
return "", nil
}
func (p *nopPage) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
return "", nil
}
func (p *nopPage) ResourceType() string {
return ""
}
func (p *nopPage) Resources() resource.Resources {
return nil
}
func (p *nopPage) Scratch() *hstore.Scratch {
return nil
}
func (p *nopPage) Store() *hstore.Scratch {
return nil
}
func (p *nopPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
return nil, nil
}
func (p *nopPage) Section() string {
return ""
}
func (p *nopPage) Sections() Pages {
return nil
}
func (p *nopPage) SectionsEntries() []string {
return nil
}
func (p *nopPage) SectionsPath() string {
return ""
}
func (p *nopPage) Site() Site {
return nil
}
func (p *nopPage) Sites() Sites {
return nil
}
func (p *nopPage) Slug() string {
return ""
}
func (p *nopPage) String() string {
return "nopPage"
}
func (p *nopPage) Summary(context.Context) template.HTML {
return ""
}
func (p *nopPage) TableOfContents(context.Context) template.HTML {
return ""
}
func (p *nopPage) Title() string {
return ""
}
func (p *nopPage) TranslationBaseName() string {
return ""
}
func (p *nopPage) TranslationKey() string {
return ""
}
func (p *nopPage) Translations() Pages {
return nil
}
func (p *nopPage) Truncated(context.Context) bool {
return false
}
func (p *nopPage) Type() string {
return ""
}
func (p *nopPage) URL() string {
return ""
}
func (p *nopPage) UniqueID() string {
return ""
}
func (p *nopPage) Weight() int {
return 0
}
func (p *nopPage) WordCount(context.Context) int {
return 0
}
func (p *nopPage) Fragments(context.Context) *tableofcontents.Fragments {
return nil
}
func (p *nopPage) HeadingsFiltered(context.Context) tableofcontents.Headings {
return nil
}
type nopContentRenderer int
func (r *nopContentRenderer) ParseAndRenderContent(ctx context.Context, content []byte, renderTOC bool) (converter.ResultRender, error) {
b := &bytes.Buffer{}
return b, nil
}
func (r *nopContentRenderer) ParseContent(ctx context.Context, content []byte) (converter.ResultParse, bool, error) {
return nil, false, nil
}
func (r *nopContentRenderer) RenderContent(ctx context.Context, content []byte, doc any) (converter.ResultRender, bool, error) {
return nil, false, nil
}
type (
nopMarkup int
nopContent int
)
var (
_ Markup = (*nopMarkup)(nil)
_ Content = (*nopContent)(nil)
)
func (c *nopMarkup) Render(context.Context) (Content, error) {
return NopContent, nil
}
func (c *nopMarkup) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
return "", nil
}
func (c *nopMarkup) RenderShortcodes(context.Context) (template.HTML, error) {
return "", nil
}
func (c *nopContent) Plain(context.Context) string {
return ""
}
func (c *nopContent) PlainWords(context.Context) []string {
return nil
}
func (c *nopContent) WordCount(context.Context) int {
return 0
}
func (c *nopContent) FuzzyWordCount(context.Context) int {
return 0
}
func (c *nopContent) ReadingTime(context.Context) int {
return 0
}
func (c *nopContent) Len(context.Context) int {
return 0
}
func (c *nopContent) Content(context.Context) (template.HTML, error) {
return "", nil
}
func (c *nopContent) ContentWithoutSummary(context.Context) (template.HTML, error) {
return "", nil
}
func (c *nopMarkup) Fragments(context.Context) *tableofcontents.Fragments {
return nil
}
func (c *nopMarkup) FragmentsHTML(context.Context) template.HTML {
return ""
}
func (c *nopContent) Summary(context.Context) (Summary, error) {
return Summary{}, nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/testhelpers_test.go | resources/page/testhelpers_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"fmt"
"html/template"
"path"
"path/filepath"
"time"
"github.com/gohugoio/hugo/markup/tableofcontents"
"github.com/gohugoio/hugo/resources/resource"
"github.com/gohugoio/hugo/navigation"
"github.com/gohugoio/hugo/common/hstore"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/related"
"github.com/gohugoio/hugo/source"
)
var (
_ resource.LengthProvider = (*testPage)(nil)
_ Page = (*testPage)(nil)
)
var relatedDocsHandler = NewRelatedDocsHandler(related.DefaultConfig)
func newTestPage() *testPage {
return newTestPageWithFile("/a/b/c.md")
}
// newTestPageWithFile creates a testPage backed by a content file at
// filename, with an English (UTC) language site and a current section
// whose entries are {"a", "b", "c"}.
func newTestPageWithFile(filename string) *testPage {
	filename = filepath.FromSlash(filename)
	file := source.NewContentFileInfoFrom(filename, filename)
	l, err := langs.NewLanguage(
		"en",
		"en",
		"UTC",
		langs.LanguageConfig{
			LanguageName: "English",
		},
	)
	if err != nil {
		// Test helper: fail hard on setup errors.
		panic(err)
	}
	return &testPage{
		params: make(map[string]any),
		data: make(map[string]any),
		file: file,
		pathInfo: file.FileInfo().Meta().PathInfo,
		currentSection: &testPage{
			sectionEntries: []string{"a", "b", "c"},
		},
		site: testSite{l: l},
	}
}
type testPage struct {
kind string
description string
title string
linkTitle string
lang string
section string
site testSite
content string
fuzzyWordCount int
path string
pathInfo *paths.Path
slug string
// Dates
date time.Time
lastMod time.Time
expiryDate time.Time
pubDate time.Time
weight int
params map[string]any
data map[string]any
file *source.File
currentSection *testPage
sectionEntries []string
ancestors Pages
}
func (p *testPage) Aliases() []string {
panic("testpage: not implemented")
}
func (p *testPage) AllTranslations() Pages {
panic("testpage: not implemented")
}
func (p *testPage) AlternativeOutputFormats() OutputFormats {
panic("testpage: not implemented")
}
func (p *testPage) BaseFileName() string {
panic("testpage: not implemented")
}
func (p *testPage) BundleType() string {
panic("testpage: not implemented")
}
func (p *testPage) Content(context.Context) (any, error) {
panic("testpage: not implemented")
}
func (p *testPage) Markup(...any) Markup {
panic("testpage: not implemented")
}
func (p *testPage) ContentBaseName() string {
panic("testpage: not implemented")
}
func (p *testPage) CurrentSection() Page {
return p.currentSection
}
func (p *testPage) Data() any {
return p.data
}
func (p *testPage) Sitemap() config.SitemapConfig {
return config.SitemapConfig{}
}
func (p *testPage) Layout() string {
return ""
}
func (p *testPage) Date() time.Time {
return p.date
}
func (p *testPage) Description() string {
return ""
}
func (p *testPage) ContentWithoutSummary(ctx context.Context) (template.HTML, error) {
return "", nil
}
func (p *testPage) Dir() string {
panic("testpage: not implemented")
}
func (p *testPage) Draft() bool {
panic("testpage: not implemented")
}
func (p *testPage) Eq(other any) bool {
return p == other
}
func (p *testPage) ExpiryDate() time.Time {
return p.expiryDate
}
func (p *testPage) File() *source.File {
return p.file
}
func (p *testPage) FileInfo() hugofs.FileMetaInfo {
panic("testpage: not implemented")
}
func (p *testPage) Filename() string {
panic("testpage: not implemented")
}
// FirstSection returns the page's current section when one is set;
// otherwise the page itself is treated as its own first section.
func (p *testPage) FirstSection() Page {
	if cs := p.currentSection; cs != nil {
		return cs
	}
	return p
}
func (p *testPage) FuzzyWordCount(context.Context) int {
return p.fuzzyWordCount
}
func (p *testPage) GetPage(ref string) (Page, error) {
panic("testpage: not implemented")
}
func (p *testPage) GetParam(key string) any {
panic("testpage: not implemented")
}
func (p *testPage) GetTerms(taxonomy string) Pages {
panic("testpage: not implemented")
}
func (p *testPage) GetInternalRelatedDocsHandler() *RelatedDocsHandler {
return relatedDocsHandler
}
func (p *testPage) GitInfo() *source.GitInfo {
return nil
}
func (p *testPage) CodeOwners() []string {
return nil
}
func (p *testPage) HasMenuCurrent(menuID string, me *navigation.MenuEntry) bool {
panic("testpage: not implemented")
}
func (p *testPage) HasShortcode(name string) bool {
panic("testpage: not implemented")
}
func (p *testPage) Hugo() hugo.HugoInfo {
panic("testpage: not implemented")
}
func (p *testPage) InSection(other any) bool {
panic("testpage: not implemented")
}
func (p *testPage) IsAncestor(other any) bool {
panic("testpage: not implemented")
}
func (p *testPage) IsDescendant(other any) bool {
panic("testpage: not implemented")
}
func (p *testPage) IsDraft() bool {
return false
}
func (p *testPage) IsHome() bool {
return p.kind == "home"
}
func (p *testPage) IsMenuCurrent(menuID string, inme *navigation.MenuEntry) bool {
panic("testpage: not implemented")
}
func (p *testPage) IsNode() bool {
panic("testpage: not implemented")
}
func (p *testPage) IsPage() bool {
panic("testpage: not implemented")
}
func (p *testPage) IsSection() bool {
return p.kind == "section"
}
func (p *testPage) IsTranslated() bool {
panic("testpage: not implemented")
}
func (p *testPage) Ancestors() Pages {
return p.ancestors
}
func (p *testPage) Keywords() []string {
return nil
}
func (p *testPage) Kind() string {
return p.kind
}
func (p *testPage) Lang() string {
return p.lang
}
func (p *testPage) Language() *langs.Language {
panic("testpage: not implemented")
}
func (p *testPage) LanguagePrefix() string {
return ""
}
func (p *testPage) Fragments(context.Context) *tableofcontents.Fragments {
return nil
}
func (p *testPage) HeadingsFiltered(context.Context) tableofcontents.Headings {
return nil
}
func (p *testPage) Lastmod() time.Time {
return p.lastMod
}
func (p *testPage) Len(context.Context) int {
return len(p.content)
}
// LinkTitle returns the first non-empty value of linkTitle, title and
// path, in that order.
func (p *testPage) LinkTitle() string {
	switch {
	case p.linkTitle != "":
		return p.linkTitle
	case p.title != "":
		return p.title
	default:
		return p.path
	}
}
func (p *testPage) LogicalName() string {
panic("testpage: not implemented")
}
func (p *testPage) MediaType() media.Type {
panic("testpage: not implemented")
}
func (p *testPage) Menus() navigation.PageMenus {
return navigation.PageMenus{}
}
func (p *testPage) Name() string {
panic("testpage: not implemented")
}
func (p *testPage) Next() Page {
panic("testpage: not implemented")
}
func (p *testPage) NextInSection() Page {
return nil
}
func (p *testPage) NextPage() Page {
return nil
}
func (p *testPage) OutputFormats() OutputFormats {
panic("testpage: not implemented")
}
func (p *testPage) Pages() Pages {
panic("testpage: not implemented")
}
func (p *testPage) RegularPages() Pages {
panic("testpage: not implemented")
}
func (p *testPage) RegularPagesRecursive() Pages {
panic("testpage: not implemented")
}
func (p *testPage) Paginate(seq any, options ...any) (*Pager, error) {
return nil, nil
}
func (p *testPage) Paginator(options ...any) (*Pager, error) {
return nil, nil
}
func (p *testPage) Param(key any) (any, error) {
return resource.Param(p, nil, key)
}
func (p *testPage) Params() maps.Params {
return p.params
}
func (p *testPage) Page() Page {
return p
}
func (p *testPage) Parent() Page {
panic("testpage: not implemented")
}
func (p *testPage) Path() string {
return p.path
}
func (p *testPage) PathInfo() *paths.Path {
return p.pathInfo
}
func (p *testPage) Permalink() string {
panic("testpage: not implemented")
}
func (p *testPage) Plain(context.Context) string {
panic("testpage: not implemented")
}
func (p *testPage) PlainWords(context.Context) []string {
panic("testpage: not implemented")
}
func (p *testPage) Prev() Page {
panic("testpage: not implemented")
}
func (p *testPage) PrevInSection() Page {
return nil
}
func (p *testPage) PrevPage() Page {
return nil
}
func (p *testPage) PublishDate() time.Time {
return p.pubDate
}
func (p *testPage) RawContent() string {
panic("testpage: not implemented")
}
func (p *testPage) RenderShortcodes(context.Context) (template.HTML, error) {
panic("testpage: not implemented")
}
func (p *testPage) ReadingTime(context.Context) int {
panic("testpage: not implemented")
}
func (p *testPage) Ref(argsm map[string]any) (string, error) {
panic("testpage: not implemented")
}
func (p *testPage) RefFrom(argsm map[string]any, source any) (string, error) {
return "", nil
}
func (p *testPage) RelPermalink() string {
panic("testpage: not implemented")
}
func (p *testPage) RelRef(argsm map[string]any) (string, error) {
panic("testpage: not implemented")
}
func (p *testPage) RelRefFrom(argsm map[string]any, source any) (string, error) {
return "", nil
}
func (p *testPage) Render(ctx context.Context, layout ...string) (template.HTML, error) {
panic("testpage: not implemented")
}
func (p *testPage) RenderString(ctx context.Context, args ...any) (template.HTML, error) {
panic("testpage: not implemented")
}
func (p *testPage) ResourceType() string {
panic("testpage: not implemented")
}
func (p *testPage) Resources() resource.Resources {
panic("testpage: not implemented")
}
func (p *testPage) Scratch() *hstore.Scratch {
panic("testpage: not implemented")
}
func (p *testPage) Store() *hstore.Scratch {
panic("testpage: not implemented")
}
func (p *testPage) RelatedKeywords(cfg related.IndexConfig) ([]related.Keyword, error) {
v, err := p.Param(cfg.Name)
if err != nil {
return nil, err
}
return cfg.ToKeywords(v)
}
func (p *testPage) Section() string {
return p.section
}
func (p *testPage) Sections() Pages {
panic("testpage: not implemented")
}
func (p *testPage) SectionsEntries() []string {
return p.sectionEntries
}
func (p *testPage) SectionsPath() string {
return path.Join(p.sectionEntries...)
}
func (p *testPage) Site() Site {
return p.site
}
func (p *testPage) Sites() Sites {
panic("testpage: not implemented")
}
func (p *testPage) Slug() string {
return p.slug
}
func (p *testPage) String() string {
return p.path
}
func (p *testPage) Summary(context.Context) template.HTML {
panic("testpage: not implemented")
}
func (p *testPage) TableOfContents(context.Context) template.HTML {
panic("testpage: not implemented")
}
func (p *testPage) Title() string {
return p.title
}
func (p *testPage) TranslationBaseName() string {
panic("testpage: not implemented")
}
func (p *testPage) TranslationKey() string {
return p.path
}
func (p *testPage) Translations() Pages {
panic("testpage: not implemented")
}
func (p *testPage) Truncated(context.Context) bool {
panic("testpage: not implemented")
}
func (p *testPage) Type() string {
return p.section
}
func (p *testPage) URL() string {
return ""
}
func (p *testPage) UniqueID() string {
panic("testpage: not implemented")
}
func (p *testPage) Weight() int {
return p.weight
}
func (p *testPage) WordCount(context.Context) int {
panic("testpage: not implemented")
}
// createTestPages builds num test pages with synthetic paths. Pages at
// even indexes get weight 10, odd ones weight 5; fuzzyWordCount grows
// with the index so sort tests have distinct values.
func createTestPages(num int) Pages {
	pages := make(Pages, num)
	for i := range num {
		weight := 5
		if i%2 == 0 {
			weight = 10
		}
		pages[i] = &testPage{
			path:           fmt.Sprintf("/x/y/z/p%d.md", i),
			weight:         weight,
			fuzzyWordCount: i + 2, // magic
		}
	}
	return pages
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagination.go | resources/page/pagination.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"errors"
"fmt"
"math"
"reflect"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/config"
"github.com/spf13/cast"
)
// PaginatorProvider provides two ways to create a page paginator.
type PaginatorProvider interface {
// Paginator creates a paginator with the default page set.
Paginator(options ...any) (*Pager, error)
// Paginate creates a paginator with the given page set in pages.
Paginate(pages any, options ...any) (*Pager, error)
}
var _ PaginatorProvider = (*PaginatorNotSupportedFunc)(nil)
type PaginatorNotSupportedFunc func() error
func (f PaginatorNotSupportedFunc) Paginate(pages any, options ...any) (*Pager, error) {
return nil, f()
}
func (f PaginatorNotSupportedFunc) Paginator(options ...any) (*Pager, error) {
return nil, f()
}
// Pager represents one of the elements in a paginator.
// The number, starting on 1, represents its place.
type Pager struct {
number int
*Paginator
}
func (p Pager) String() string {
return fmt.Sprintf("Pager %d", p.number)
}
type paginatedElement interface {
Len() int
}
type pagers []*Pager
var (
paginatorEmptyPages Pages
paginatorEmptyPageGroups PagesGroup
)
type Paginator struct {
paginatedElements []paginatedElement
pagers
paginationURLFactory
total int
size int
}
type paginationURLFactory func(int) string
// PageNumber returns the current page's number in the pager sequence.
// Numbers are 1-based.
func (p *Pager) PageNumber() int {
	return p.number
}
// URL returns the URL to the current page.
func (p *Pager) URL() string {
	return p.paginationURLFactory(p.PageNumber())
}
// Pages returns the Pages on this page.
// Note: If this returns a non-empty result, then PageGroups() will return empty.
func (p *Pager) Pages() Pages {
	if len(p.paginatedElements) == 0 {
		return paginatorEmptyPages
	}
	if pages, ok := p.element().(Pages); ok {
		return pages
	}
	return paginatorEmptyPages
}
// PageGroups returns Page groups for this page.
// Note: If this returns a non-empty result, then Pages() will return empty.
func (p *Pager) PageGroups() PagesGroup {
	if len(p.paginatedElements) == 0 {
		return paginatorEmptyPageGroups
	}
	if groups, ok := p.element().(PagesGroup); ok {
		return groups
	}
	return paginatorEmptyPageGroups
}
// element returns this pager's paginated element (a Pages or a
// PagesGroup); an empty paginator yields paginatorEmptyPages.
func (p *Pager) element() paginatedElement {
	if len(p.paginatedElements) == 0 {
		return paginatorEmptyPages
	}
	return p.paginatedElements[p.PageNumber()-1]
}
// page returns the Page at index within this pager's element, or
// (nil, nil) when the index is out of range.
func (p *Pager) page(index int) (Page, error) {
	if pages, ok := p.element().(Pages); ok {
		if index < len(pages) {
			return pages[index], nil
		}
		return nil, nil
	}
	// Anything else must be a PagesGroup; walk its pages in order
	// until the wanted flat position is reached.
	groups := p.element().(PagesGroup)
	pos := 0
	for _, group := range groups {
		for _, pg := range group.Pages {
			if pos == index {
				return pg, nil
			}
			pos++
		}
	}
	return nil, nil
}
// NumberOfElements gets the number of elements on this page.
func (p *Pager) NumberOfElements() int {
	return p.element().Len()
}
// HasPrev tests whether there are page(s) before the current.
func (p *Pager) HasPrev() bool {
	return p.PageNumber() > 1
}
// Prev returns the pager for the previous page, or nil on the first page.
func (p *Pager) Prev() *Pager {
	if !p.HasPrev() {
		return nil
	}
	// Numbers are 1-based, so the previous pager lives at index number-2.
	return p.pagers[p.PageNumber()-2]
}
// HasNext tests whether there are page(s) after the current.
func (p *Pager) HasNext() bool {
	return p.PageNumber() < len(p.paginatedElements)
}
// Next returns the pager for the next page, or nil on the last page.
func (p *Pager) Next() *Pager {
	if !p.HasNext() {
		return nil
	}
	// Numbers are 1-based, so the next pager lives at index number.
	return p.pagers[p.PageNumber()]
}
// First returns the pager for the first page.
// Safe even for empty results: newPaginator always creates at least one pager.
func (p *Pager) First() *Pager {
	return p.pagers[0]
}
// Last returns the pager for the last page.
// Safe even for empty results: newPaginator always creates at least one pager.
func (p *Pager) Last() *Pager {
	return p.pagers[len(p.pagers)-1]
}
// Pagers returns a list of pagers that can be used to build a pagination menu.
func (p *Paginator) Pagers() pagers {
return p.pagers
}
// PageSize returns the size of each paginator page.
// Deprecated: Use PagerSize instead.
func (p *Paginator) PageSize() int {
hugo.Deprecate("PageSize", "Use PagerSize instead.", "v0.128.0")
return p.size
}
// PagerSize returns the size of each paginator page.
func (p *Paginator) PagerSize() int {
return p.size
}
// TotalPages returns the number of pages in the paginator.
func (p *Paginator) TotalPages() int {
return len(p.paginatedElements)
}
// TotalNumberOfElements returns the number of elements on all pages in this paginator.
func (p *Paginator) TotalNumberOfElements() int {
return p.total
}
// splitPages partitions pages into consecutive chunks of at most size
// elements; the final chunk may be shorter. An empty input yields nil.
// size is assumed positive (callers validate it).
func splitPages(pages Pages, size int) []paginatedElement {
	var split []paginatedElement
	for low := 0; low < len(pages); low += size {
		// Built-in min avoids the lossy int->float64->int round trip
		// of math.Min.
		high := min(low+size, len(pages))
		split = append(split, pages[low:high])
	}
	return split
}
// splitPageGroups splits the flattened pages of pageGroups into chunks
// of at most size elements while preserving group keys: each resulting
// chunk is itself a PagesGroup covering only the pages in that chunk.
func splitPageGroups(pageGroups PagesGroup, size int) []paginatedElement {
	// keyPage pairs a page with the key of the group it came from so the
	// group structure can be rebuilt after flattening.
	type keyPage struct {
		key any
		page Page
	}
	var (
		split []paginatedElement
		flattened []keyPage
	)
	// Flatten all groups into one ordered list of (key, page) pairs.
	for _, g := range pageGroups {
		for _, p := range g.Pages {
			flattened = append(flattened, keyPage{g.Key, p})
		}
	}
	numPages := len(flattened)
	for low, j := 0, numPages; low < j; low += size {
		high := int(math.Min(float64(low+size), float64(numPages)))
		var (
			pg PagesGroup
			key any
			groupIndex = -1
		)
		// Rebuild groups inside this chunk: start a new group whenever
		// the key changes from the previous page's key.
		for k := low; k < high; k++ {
			kp := flattened[k]
			if key == nil || key != kp.key {
				key = kp.key
				pg = append(pg, PageGroup{Key: key})
				groupIndex++
			}
			pg[groupIndex].Pages = append(pg[groupIndex].Pages, kp.page)
		}
		split = append(split, pg)
	}
	return split
}
// ResolvePagerSize returns the pager size to use. With no options the
// configured default is returned; otherwise the single option must be
// convertible to a positive integer.
func ResolvePagerSize(conf config.AllProvider, options ...any) (int, error) {
	if len(options) == 0 {
		return conf.Pagination().PagerSize, nil
	}
	if len(options) > 1 {
		return -1, errors.New("too many arguments, 'pager size' is currently the only option")
	}
	pas, err := cast.ToIntE(options[0])
	if err != nil || pas <= 0 {
		return -1, errors.New("'pager size' must be a positive integer")
	}
	return pas, nil
}
// Paginate creates a Paginator for seq — either a PagesGroup or
// anything convertible to Pages — split into pages of pagerSize
// elements each.
func Paginate(td TargetPathDescriptor, seq any, pagerSize int) (*Paginator, error) {
	if pagerSize <= 0 {
		return nil, errors.New("'paginate' configuration setting must be positive to paginate")
	}
	urlFactory := newPaginationURLFactory(td)
	var paginator *Paginator
	groups, ok, err := ToPagesGroup(seq)
	if err != nil {
		return nil, err
	}
	if ok {
		// Error intentionally discarded: the constructors only fail on
		// a non-positive size, which was ruled out above.
		paginator, _ = newPaginatorFromPageGroups(groups, pagerSize, urlFactory)
	} else {
		pages, err := ToPages(seq)
		if err != nil {
			return nil, err
		}
		// Error intentionally discarded; see above.
		paginator, _ = newPaginatorFromPages(pages, pagerSize, urlFactory)
	}
	return paginator, nil
}
// probablyEqualPageLists checks page lists for probable equality.
// It may return false positives.
// The motivation behind this is to avoid potential costly reflect.DeepEqual
// when "probably" is good enough.
func probablyEqualPageLists(a1 any, a2 any) bool {
	if a1 == nil || a2 == nil {
		return a1 == a2
	}
	t1 := reflect.TypeOf(a1)
	t2 := reflect.TypeOf(a2)
	if t1 != t2 {
		return false
	}
	if g1, ok := a1.(PagesGroup); ok {
		g2 := a2.(PagesGroup)
		if len(g1) != len(g2) {
			return false
		}
		if len(g1) == 0 {
			return true
		}
		if g1.Len() != g2.Len() {
			return false
		}
		// Same group count and total page count: compare only the first
		// page as a cheap proxy for full equality.
		// NOTE(review): assumes the first group is non-empty — an empty
		// first group would panic here; confirm with the callers.
		return g1[0].Pages[0] == g2[0].Pages[0]
	}
	p1, err1 := ToPages(a1)
	p2, err2 := ToPages(a2)
	// probably the same wrong type
	if err1 != nil && err2 != nil {
		return true
	}
	if len(p1) != len(p2) {
		return false
	}
	if len(p1) == 0 {
		return true
	}
	// Same length: compare only the first element as a cheap proxy.
	return p1[0] == p2[0]
}
// newPaginatorFromPages builds a Paginator from pages split into chunks
// of size elements. size must be positive.
func newPaginatorFromPages(pages Pages, size int, urlFactory paginationURLFactory) (*Paginator, error) {
	if size <= 0 {
		// Error strings are lowercase per Go convention.
		return nil, errors.New("paginator size must be positive")
	}
	split := splitPages(pages, size)
	return newPaginator(split, len(pages), size, urlFactory)
}
// newPaginatorFromPageGroups splits the grouped pages into chunks of size
// pages each (groups may span chunks) and builds a Paginator over them.
// size must be positive.
func newPaginatorFromPageGroups(pageGroups PagesGroup, size int, urlFactory paginationURLFactory) (*Paginator, error) {
	if size <= 0 {
		// Go convention: error strings are not capitalized.
		return nil, errors.New("paginator size must be positive")
	}

	split := splitPageGroups(pageGroups, size)

	// pageGroups.Len() counts pages, not groups.
	return newPaginator(split, pageGroups.Len(), size, urlFactory)
}
// newPaginator assembles a Paginator from the already split elements and
// creates one Pager per element. An empty element list still produces a
// single (empty) pager so callers always have at least one page to render.
func newPaginator(elements []paginatedElement, total, size int, urlFactory paginationURLFactory) (*Paginator, error) {
	paginator := &Paginator{
		total:                total,
		paginatedElements:    elements,
		size:                 size,
		paginationURLFactory: urlFactory,
	}

	numPagers := len(elements)
	if numPagers == 0 {
		numPagers = 1
	}

	pagerList := make(pagers, numPagers)
	for i := range pagerList {
		// Pager numbers are 1-based.
		pagerList[i] = &Pager{number: i + 1, Paginator: paginator}
	}
	paginator.pagers = pagerList

	return paginator, nil
}
// newPaginationURLFactory returns a function that builds the relative
// permalink for a given pager number based on the target path descriptor d.
// Page 1 keeps the base path; later pages append "/<pagination path>/<n>/".
func newPaginationURLFactory(d TargetPathDescriptor) paginationURLFactory {
	return func(pageNumber int) string {
		// Copy the descriptor so the closure never mutates d.
		descriptor := d
		if pageNumber > 1 {
			descriptor.Addends = fmt.Sprintf("/%s/%d/", d.PathSpec.Cfg.Pagination().Path, pageNumber)
		}
		return CreateTargetPaths(descriptor).RelPermalink(d.PathSpec)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_sort_search_test.go | resources/page/pages_sort_search_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
"math/rand"
"testing"
"time"
qt "github.com/frankban/quicktest"
)
// TestSearchPage verifies that searchPageBinary finds the exact index of
// every page in a sorted list, including lists with duplicate sort keys
// (only two distinct titles below) and reversed sort order.
func TestSearchPage(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	pages := createSortTestPages(10)
	// Force duplicate titles so the search must cope with equal keys.
	for i, p := range pages {
		p.(*testPage).title = fmt.Sprintf("Title %d", i%2)
	}

	for _, pages := range []Pages{pages.ByTitle(), pages.ByTitle().Reverse()} {
		// The heuristic must recognize the list as sorted and hand back
		// the matching less func used by the binary search.
		less := isPagesProbablySorted(pages, lessPageTitle)
		c.Assert(less, qt.Not(qt.IsNil))

		for i, p := range pages {
			idx := searchPageBinary(p, pages, less)
			c.Assert(idx, qt.Equals, i)
		}
	}
}
// BenchmarkSearchPage compares searchPage across differently sorted page
// lists of varying sizes (including a shuffled, unsorted variant) and
// against a plain linear search as a baseline.
func BenchmarkSearchPage(b *testing.B) {
	type Variant struct {
		name         string
		preparePages func(pages Pages) Pages
		search       func(p Page, pages Pages) int
	}

	shufflePages := func(pages Pages) Pages {
		rand.Shuffle(len(pages), func(i, j int) { pages[i], pages[j] = pages[j], pages[i] })
		return pages
	}

	linearSearch := func(p Page, pages Pages) int {
		return searchPageLinear(p, pages, 0)
	}

	// createPages builds pages with randomized weight/title/dates so the
	// sort orders below produce non-trivial orderings.
	createPages := func(num int) Pages {
		pages := createSortTestPages(num)
		for _, p := range pages {
			tp := p.(*testPage)
			tp.weight = rand.Intn(len(pages))
			tp.title = fmt.Sprintf("Title %d", rand.Intn(len(pages)))
			tp.pubDate = time.Now().Add(time.Duration(rand.Intn(len(pages)/5)) * time.Hour)
			tp.date = time.Now().Add(time.Duration(rand.Intn(len(pages)/5)) * time.Hour)
		}
		return pages
	}

	for _, variant := range []Variant{
		{"Shuffled", shufflePages, searchPage},
		{"ByWeight", func(pages Pages) Pages {
			return pages.ByWeight()
		}, searchPage},
		{"ByWeight.Reverse", func(pages Pages) Pages {
			return pages.ByWeight().Reverse()
		}, searchPage},
		{"ByDate", func(pages Pages) Pages {
			return pages.ByDate()
		}, searchPage},
		{"ByPublishDate", func(pages Pages) Pages {
			return pages.ByPublishDate()
		}, searchPage},
		{"ByTitle", func(pages Pages) Pages {
			return pages.ByTitle()
		}, searchPage},
		{"ByTitle Linear", func(pages Pages) Pages {
			return pages.ByTitle()
		}, linearSearch},
	} {
		for _, numPages := range []int{100, 500, 1000, 5000} {
			b.Run(fmt.Sprintf("%s-%d", variant.name, numPages), func(b *testing.B) {
				// Keep page construction out of the measured region.
				b.StopTimer()
				pages := createPages(numPages)
				if variant.preparePages != nil {
					pages = variant.preparePages(pages)
				}
				b.StartTimer()
				for b.Loop() {
					// Look up a randomly chosen page; the result must be
					// the exact index we picked.
					j := rand.Intn(numPages)
					k := variant.search(pages[j], pages)
					if k != j {
						b.Fatalf("%d != %d", k, j)
					}
				}
			})
		}
	}
}
// TestIsPagesProbablySorted checks the sortedness heuristic: sorted inputs
// (by weight, by title) yield a non-nil less func, unsorted input yields
// nil. Both small (6) and large (300) lists are exercised.
func TestIsPagesProbablySorted(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	c.Assert(isPagesProbablySorted(createSortTestPages(6).ByWeight(), DefaultPageSort), qt.Not(qt.IsNil))
	c.Assert(isPagesProbablySorted(createSortTestPages(300).ByWeight(), DefaultPageSort), qt.Not(qt.IsNil))
	// An unsorted list must not be reported as sorted.
	c.Assert(isPagesProbablySorted(createSortTestPages(6), DefaultPageSort), qt.IsNil)
	c.Assert(isPagesProbablySorted(createSortTestPages(300).ByTitle(), pageLessFunctions...), qt.Not(qt.IsNil))
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_prev_next_integration_test.go | resources/page/pages_prev_next_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"strings"
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestNextPrevConfig exercises the [page] nextPrevSortOrder and
// nextPrevInSectionSortOrder settings: the default order, both set to
// ascending, and each set independently of the other. The mixed-case
// values ("aSc", "asC") also exercise case-insensitive parsing.
func TestNextPrevConfig(t *testing.T) {
	filesTemplate := `
-- hugo.toml --
-- content/mysection/_index.md --
-- content/mysection/p1.md --
---
title: "Page 1"
weight: 10
---
-- content/mysection/p2.md --
---
title: "Page 2"
weight: 20
---
-- content/mysection/p3.md --
---
title: "Page 3"
weight: 30
---
-- layouts/single.html --
{{ .Title }}|Next: {{ with .Next}}{{ .Title}}{{ end }}|Prev: {{ with .Prev}}{{ .Title}}{{ end }}|NextInSection: {{ with .NextInSection}}{{ .Title}}{{ end }}|PrevInSection: {{ with .PrevInSection}}{{ .Title}}{{ end }}|
`

	// Default sort order: Next points towards p1.
	b := hugolib.Test(t, filesTemplate)

	b.AssertFileContent("public/mysection/p1/index.html", "Page 1|Next: |Prev: Page 2|NextInSection: |PrevInSection: Page 2|")
	b.AssertFileContent("public/mysection/p2/index.html", "Page 2|Next: Page 1|Prev: Page 3|NextInSection: Page 1|PrevInSection: Page 3|")
	b.AssertFileContent("public/mysection/p3/index.html", "Page 3|Next: Page 2|Prev: |NextInSection: Page 2|PrevInSection: |")

	// Both global and in-section order ascending.
	files := strings.ReplaceAll(filesTemplate, "-- hugo.toml --", `-- hugo.toml --
[page]
nextPrevSortOrder="aSc"
nextPrevInSectionSortOrder="asC"
`)

	b = hugolib.Test(t, files)

	b.AssertFileContent("public/mysection/p1/index.html", "Page 1|Next: Page 2|Prev: |NextInSection: Page 2|PrevInSection: |")
	b.AssertFileContent("public/mysection/p2/index.html", "Page 2|Next: Page 3|Prev: Page 1|NextInSection: Page 3|PrevInSection: Page 1|")
	b.AssertFileContent("public/mysection/p3/index.html", "Page 3|Next: |Prev: Page 2|NextInSection: |PrevInSection: Page 2|")

	// Only the global order ascending; in-section keeps the default.
	files = strings.ReplaceAll(filesTemplate, "-- hugo.toml --", `-- hugo.toml --
[page]
nextPrevSortOrder="aSc"
`)

	b = hugolib.Test(t, files)

	b.AssertFileContent("public/mysection/p1/index.html", "Page 1|Next: Page 2|Prev: |NextInSection: |PrevInSection: Page 2|")
	b.AssertFileContent("public/mysection/p2/index.html", "Page 2|Next: Page 3|Prev: Page 1|NextInSection: Page 1|PrevInSection: Page 3|")
	b.AssertFileContent("public/mysection/p3/index.html", "Page 3|Next: |Prev: Page 2|NextInSection: Page 2|PrevInSection: |")

	// Only the in-section order ascending; global keeps the default.
	files = strings.ReplaceAll(filesTemplate, "-- hugo.toml --", `-- hugo.toml --
[page]
nextPrevInSectionSortOrder="aSc"
`)

	b = hugolib.Test(t, files)

	b.AssertFileContent("public/mysection/p1/index.html", "Page 1|Next: |Prev: Page 2|NextInSection: Page 2|PrevInSection: |")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/permalinks_integration_test.go | resources/page/permalinks_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"strings"
"testing"
"github.com/bep/logg"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/hugolib"
)
// TestPermalinks covers the permalink token matrix: :slug, :sectionslug(s),
// :sections with slice syntax, :filename/:title fallbacks for sections with
// and without _index files, and taxonomy/term patterns. It also asserts the
// normalized permalinks configuration per page kind.
func TestPermalinks(t *testing.T) {
	t.Parallel()

	files := `
-- layouts/list.html --
List|{{ .Kind }}|{{ .RelPermalink }}|
-- layouts/single.html --
Single|{{ .Kind }}|{{ .RelPermalink }}|
-- hugo.toml --
[taxonomies]
tag = "tags"
[permalinks.page]
withpageslug = '/pageslug/:slug/'
withallbutlastsection = '/:sections[:last]/:slug/'
withallbutlastsectionslug = '/:sectionslugs[:last]/:slug/'
withsectionslug = '/sectionslug/:sectionslug/:slug/'
withsectionslugs = '/sectionslugs/:sectionslugs/:slug/'
[permalinks.section]
withfilefilename = '/sectionwithfilefilename/:filename/'
withfilefiletitle = '/sectionwithfilefiletitle/:title/'
withfileslug = '/sectionwithfileslug/:slug/'
nofileslug = '/sectionnofileslug/:slug/'
nofilefilename = '/sectionnofilefilename/:filename/'
nofiletitle1 = '/sectionnofiletitle1/:title/'
nofiletitle2 = '/sectionnofiletitle2/:sections[:last]/'
[permalinks.term]
tags = '/tagsslug/tag/:slug/'
[permalinks.taxonomy]
tags = '/tagsslug/:slug/'
-- content/withpageslug/p1.md --
---
slug: "p1slugvalue"
tags: ["mytag"]
---
-- content/withfilefilename/_index.md --
-- content/withfileslug/_index.md --
---
slug: "withfileslugvalue"
---
-- content/nofileslug/p1.md --
-- content/nofilefilename/p1.md --
-- content/nofiletitle1/p1.md --
-- content/nofiletitle2/asdf/p1.md --
-- content/withallbutlastsection/subsection/p1.md --
-- content/withallbutlastsectionslug/_index.md --
---
slug: "root-section-slug"
---
-- content/withallbutlastsectionslug/subsection/_index.md --
---
slug: "sub-section-slug"
---
-- content/withallbutlastsectionslug/subsection/p1.md --
---
slug: "page-slug"
---
-- content/withsectionslug/_index.md --
---
slug: "section-root-slug"
---
-- content/withsectionslug/subsection/_index.md --
-- content/withsectionslug/subsection/p1.md --
---
slug: "page1-slug"
---
-- content/withsectionslugs/_index.md --
---
slug: "sections-root-slug"
---
-- content/withsectionslugs/level1/_index.md --
---
slug: "level1-slug"
---
-- content/withsectionslugs/level1/p1.md --
---
slug: "page1-slug"
---
-- content/tags/_index.md --
---
slug: "tagsslug"
---
-- content/tags/mytag/_index.md --
---
slug: "mytagslug"
---
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			LogLevel:    logg.LevelWarn,
		}).Build()

	t.Log(b.LogString())

	// No .File.TranslationBaseName on zero object etc. warnings.
	b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0)

	// Page permalinks.
	b.AssertFileContent("public/pageslug/p1slugvalue/index.html", "Single|page|/pageslug/p1slugvalue/|")
	b.AssertFileContent("public/sectionslug/section-root-slug/page1-slug/index.html", "Single|page|/sectionslug/section-root-slug/page1-slug/|")
	b.AssertFileContent("public/sectionslugs/sections-root-slug/level1-slug/page1-slug/index.html", "Single|page|/sectionslugs/sections-root-slug/level1-slug/page1-slug/|")

	// Section permalinks, with and without _index files/slugs.
	b.AssertFileContent("public/sectionwithfilefilename/index.html", "List|section|/sectionwithfilefilename/|")
	b.AssertFileContent("public/sectionwithfileslug/withfileslugvalue/index.html", "List|section|/sectionwithfileslug/withfileslugvalue/|")
	b.AssertFileContent("public/sectionnofilefilename/index.html", "List|section|/sectionnofilefilename/|")
	b.AssertFileContent("public/sectionnofileslug/nofileslugs/index.html", "List|section|/sectionnofileslug/nofileslugs/|")
	b.AssertFileContent("public/sectionnofiletitle1/nofiletitle1s/index.html", "List|section|/sectionnofiletitle1/nofiletitle1s/|")
	b.AssertFileContent("public/sectionnofiletitle2/index.html", "List|section|/sectionnofiletitle2/|")

	// Term and taxonomy permalinks.
	b.AssertFileContent("public/tagsslug/tag/mytagslug/index.html", "List|term|/tagsslug/tag/mytagslug/|")
	b.AssertFileContent("public/tagsslug/tagsslug/index.html", "List|taxonomy|/tagsslug/tagsslug/|")

	// The effective config should be normalized into one map per kind.
	permalinksConf := b.H.Configs.Base.Permalinks
	b.Assert(permalinksConf, qt.DeepEquals, map[string]map[string]string{
		"page":     {"withallbutlastsection": "/:sections[:last]/:slug/", "withallbutlastsectionslug": "/:sectionslugs[:last]/:slug/", "withpageslug": "/pageslug/:slug/", "withsectionslug": "/sectionslug/:sectionslug/:slug/", "withsectionslugs": "/sectionslugs/:sectionslugs/:slug/"},
		"section":  {"nofilefilename": "/sectionnofilefilename/:filename/", "nofileslug": "/sectionnofileslug/:slug/", "nofiletitle1": "/sectionnofiletitle1/:title/", "nofiletitle2": "/sectionnofiletitle2/:sections[:last]/", "withfilefilename": "/sectionwithfilefilename/:filename/", "withfilefiletitle": "/sectionwithfilefiletitle/:title/", "withfileslug": "/sectionwithfileslug/:slug/"},
		"taxonomy": {"tags": "/tagsslug/:slug/"},
		"term":     {"tags": "/tagsslug/tag/:slug/"},
	})
}
// TestPermalinksOldSetup verifies the legacy flat [permalinks] config (no
// page-kind sub-tables). As the final assertion shows, a flat entry is
// applied to both the "page" and "term" kinds.
func TestPermalinksOldSetup(t *testing.T) {
	t.Parallel()

	files := `
-- layouts/list.html --
List|{{ .Kind }}|{{ .RelPermalink }}|
-- layouts/single.html --
Single|{{ .Kind }}|{{ .RelPermalink }}|
-- hugo.toml --
[permalinks]
withpageslug = '/pageslug/:slug/'
-- content/withpageslug/p1.md --
---
slug: "p1slugvalue"
---
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			LogLevel:    logg.LevelWarn,
		}).Build()

	t.Log(b.LogString())

	// No .File.TranslationBaseName on zero object etc. warnings.
	b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0)

	b.AssertFileContent("public/pageslug/p1slugvalue/index.html", "Single|page|/pageslug/p1slugvalue/|")

	// The flat config is expanded to the per-kind representation.
	permalinksConf := b.H.Configs.Base.Permalinks
	b.Assert(permalinksConf, qt.DeepEquals, map[string]map[string]string{
		"page":     {"withpageslug": "/pageslug/:slug/"},
		"section":  {},
		"taxonomy": {},
		"term":     {"withpageslug": "/pageslug/:slug/"},
	})
}
// TestPermalinksNestedSections verifies the :sections[1:] slice syntax:
// the first section ("books") is dropped from the generated paths and
// replaced by the literal "libros" prefix, for both pages and sections.
func TestPermalinksNestedSections(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
[permalinks.page]
books = '/libros/:sections[1:]/:filename'
[permalinks.section]
books = '/libros/:sections[1:]'
-- content/books/_index.md --
---
title: Books
---
-- content/books/fiction/_index.md --
---
title: Fiction
---
-- content/books/fiction/2023/_index.md --
---
title: 2023
---
-- content/books/fiction/2023/book1/index.md --
---
title: Book 1
---
-- layouts/single.html --
Single.
-- layouts/list.html --
List.
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			LogLevel:    logg.LevelWarn,
		}).Build()

	t.Log(b.LogString())

	// No .File.TranslationBaseName on zero object etc. warnings.
	b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0)

	b.AssertFileContent("public/libros/index.html", "List.")
	b.AssertFileContent("public/libros/fiction/index.html", "List.")
	b.AssertFileContent("public/libros/fiction/2023/book1/index.html", "Single.")
}
// TestPermalinksNestedSectionsWithSlugs verifies :sectionslugs[1:] and
// :slug: a section's slug front matter ("fictionslug") is used when set,
// and :slug falls back to a slugified title ("book-one") when the page has
// no explicit slug.
func TestPermalinksNestedSectionsWithSlugs(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
[permalinks.page]
books = '/libros/:sectionslugs[1:]/:slug'
[permalinks.section]
books = '/libros/:sectionslugs[1:]'
-- content/books/_index.md --
---
title: Books
---
-- content/books/fiction/_index.md --
---
title: Fiction
slug: fictionslug
---
-- content/books/fiction/2023/_index.md --
---
title: 2023
---
-- content/books/fiction/2023/book1/index.md --
---
title: Book One
---
-- layouts/single.html --
Single.
-- layouts/list.html --
List.
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			LogLevel:    logg.LevelWarn,
		}).Build()

	t.Log(b.LogString())

	// No .File.TranslationBaseName on zero object etc. warnings.
	b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0)

	b.AssertFileContent("public/libros/index.html", "List.")
	b.AssertFileContent("public/libros/fictionslug/index.html", "List.")
	b.AssertFileContent("public/libros/fictionslug/2023/book-one/index.html", "Single.")
}
// TestPermalinksUrlCascade verifies permalink tokens (:slug) inside a url
// value set via front matter cascade: the section's cascaded url applies to
// its pages, expanding :slug per page (including explicit slugs).
func TestPermalinksUrlCascade(t *testing.T) {
	t.Parallel()

	files := `
-- layouts/list.html --
List|{{ .Kind }}|{{ .RelPermalink }}|
-- layouts/single.html --
Single|{{ .Kind }}|{{ .RelPermalink }}|
-- hugo.toml --
-- content/cooking/delicious-recipes/_index.md --
---
url: /delicious-recipe/
cascade:
  url: /delicious-recipe/:slug/
---
-- content/cooking/delicious-recipes/example1.md --
---
title: Recipe 1
---
-- content/cooking/delicious-recipes/example2.md --
---
title: Recipe 2
slug: custom-recipe-2
---
`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			LogLevel:    logg.LevelWarn,
		}).Build()

	t.Log(b.LogString())

	b.Assert(b.H.Log.LoggCount(logg.LevelWarn), qt.Equals, 0)

	b.AssertFileContent("public/delicious-recipe/index.html", "List|section|/delicious-recipe/")
	// No slug set: :slug expands from the slugified title.
	b.AssertFileContent("public/delicious-recipe/recipe-1/index.html", "Single|page|/delicious-recipe/recipe-1/")
	// Explicit slug wins.
	b.AssertFileContent("public/delicious-recipe/custom-recipe-2/index.html", "Single|page|/delicious-recipe/custom-recipe-2/")
}
// TestPermalinksWithEscapedColons verifies escaped colons ("\\:") in url
// front matter and permalink patterns. URLs from front matter keep the
// colon; colons in Hugo-constructed paths are stripped.
// Issue 12948.
// Issue 12954.
func TestPermalinksWithEscapedColons(t *testing.T) {
	t.Parallel()

	if htesting.IsWindows() {
		t.Skip("Windows does not support colons in paths")
	}

	files := `
-- hugo.toml --
disableKinds = ['home','rss','sitemap','taxonomy','term']
[permalinks.page]
s2 = "/c\\:d/:slug/"
-- content/s1/_index.md --
---
title: s1
url: "/a\\:b/:slug/"
---
-- content/s1/p1.md --
---
title: p1
url: "/a\\:b/:slug/"
---
-- content/s2/p2.md --
---
title: p2
---
-- layouts/single.html --
{{ .Title }}
-- layouts/list.html --
{{ .Title }}
`

	b := hugolib.Test(t, files)

	b.AssertFileExists("public/a:b/p1/index.html", true)
	b.AssertFileExists("public/a:b/s1/index.html", true)

	// The above URLs come from the URL front matter field where everything is allowed.
	// We strip colons from paths constructed by Hugo (they are not supported on Windows).
	b.AssertFileExists("public/cd/p2/index.html", true)
}
// TestPermalinksContentbasenameContentAdapter verifies the
// :contentbasename and :slugorcontentbasename tokens for pages created via
// a content adapter (_content.gotmpl): the slug wins when set, otherwise
// the content base name is used.
func TestPermalinksContentbasenameContentAdapter(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
[permalinks]
[permalinks.page]
a = "/:slugorcontentbasename/"
b = "/:sections/:contentbasename/"
-- content/_content.gotmpl --
{{ $.AddPage (dict "kind" "page" "path" "a/b/contentbasename1" "title" "My A Page No Slug") }}
{{ $.AddPage (dict "kind" "page" "path" "a/b/contentbasename2" "slug" "myslug" "title" "My A Page With Slug") }}
{{ $.AddPage (dict "kind" "section" "path" "b/c" "title" "My B Section") }}
{{ $.AddPage (dict "kind" "page" "path" "b/c/contentbasename3" "title" "My B Page No Slug") }}
-- layouts/single.html --
{{ .Title }}|{{ .RelPermalink }}|{{ .Kind }}|
`

	b := hugolib.Test(t, files)

	b.AssertFileContent("public/contentbasename1/index.html", "My A Page No Slug|/contentbasename1/|page|")
	b.AssertFileContent("public/myslug/index.html", "My A Page With Slug|/myslug/|page|")
}
// TestPermalinksContentbasenameWithAndWithoutFile verifies :contentbasename
// and :slugorcontentbasename for sections, pages and terms, both for
// content backed by files and for terms created implicitly from front
// matter (c1 below has no _index.md).
func TestPermalinksContentbasenameWithAndWithoutFile(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
[permalinks.section]
a = "/mya/:contentbasename/"
[permalinks.page]
a = "/myapage/:contentbasename/"
[permalinks.term]
categories = "/myc/:slugorcontentbasename/"
-- content/b/c/_index.md --
---
title: "C section"
---
-- content/a/b/index.md --
---
title: "My Title"
categories: ["c1", "c2"]
---
-- content/categories/c2/_index.md --
---
title: "C2"
slug: "c2slug"
---
-- layouts/single.html --
{{ .Title }}|{{ .RelPermalink }}|{{ .Kind }}|
-- layouts/list.html --
{{ .Title }}|{{ .RelPermalink }}|{{ .Kind }}|
`

	b := hugolib.Test(t, files)

	// Sections.
	b.AssertFileContent("public/mya/a/index.html", "As|/mya/a/|section|")

	// Pages.
	b.AssertFileContent("public/myapage/b/index.html", "My Title|/myapage/b/|page|")

	// Taxonomies. c1 has no file, so :slugorcontentbasename falls back to
	// the term name; c2's explicit slug wins.
	b.AssertFileContent("public/myc/c1/index.html", "C1|/myc/c1/|term|")
	b.AssertFileContent("public/myc/c2slug/index.html", "C2|/myc/c2slug/|term|")
}
// TestIssue13755 verifies that :contentbasename honors the
// disablePathToLower setting: lowercased by default, original case kept
// when disablePathToLower is true.
func TestIssue13755(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
disableKinds = ['home','rss','section','sitemap','taxonomy','term']
disablePathToLower = false
[permalinks.page]
s1 = "/:contentbasename"
-- content/s1/aBc.md --
---
title: aBc
---
-- layouts/all.html --
{{ .Title }}
`

	b := hugolib.Test(t, files)
	b.AssertFileExists("public/abc/index.html", true)

	// Flip the setting: the mixed-case basename must be preserved.
	files = strings.ReplaceAll(files, "disablePathToLower = false", "disablePathToLower = true")

	b = hugolib.Test(t, files)
	b.AssertFileExists("public/aBc/index.html", true)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_cache_test.go | resources/page/pages_cache_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"strconv"
"sync"
"sync/atomic"
"testing"
qt "github.com/frankban/quicktest"
)
// TestPageCache hammers a pageCache from 100 goroutines over 50 distinct
// page sets and verifies that for each set exactly one goroutine sees a
// cache miss (the CAS winner) while all others see hits, that cached
// results compare equal to the input, and that the optional mutator func
// (changeFirst) is applied to the cached pages.
func TestPageCache(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	c1 := newPageCache()

	changeFirst := func(p Pages) {
		p[0].(*testPage).description = "changed"
	}

	// o1/o2 track, per cache key, how many page sets have been inserted.
	var o1 uint64
	var o2 uint64

	var wg sync.WaitGroup
	var l1 sync.Mutex
	var l2 sync.Mutex

	var testPageSets []Pages

	for i := range 50 {
		testPageSets = append(testPageSets, createSortTestPages(i+1))
	}

	for range 100 {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for k, pages := range testPageSets {
				// Under l1, exactly one goroutine wins the CAS from k to
				// k+1 for this set; the winner must observe a miss
				// (ca == false), everyone else a hit.
				l1.Lock()
				p, ca := c1.get("k1", nil, pages)
				c.Assert(ca, qt.Equals, !atomic.CompareAndSwapUint64(&o1, uint64(k), uint64(k+1)))
				l1.Unlock()

				// Re-fetching with the cached slice must always hit and
				// return an equal list.
				p2, c2 := c1.get("k1", nil, p)
				c.Assert(c2, qt.Equals, true)
				c.Assert(pagesEqual(p, p2), qt.Equals, true)
				c.Assert(pagesEqual(p, pages), qt.Equals, true)
				c.Assert(p, qt.Not(qt.IsNil))

				// Same dance for key "k2", this time with a mutator.
				l2.Lock()
				p3, c3 := c1.get("k2", changeFirst, pages)
				c.Assert(c3, qt.Equals, !atomic.CompareAndSwapUint64(&o2, uint64(k), uint64(k+1)))
				l2.Unlock()
				c.Assert(p3, qt.Not(qt.IsNil))
				c.Assert("changed", qt.Equals, p3[0].(*testPage).description)
			}
		}()
	}
	wg.Wait()
}
// BenchmarkPageCache measures repeated lookups of the same key and page
// list in pageCache.getP (the steady-state cache-hit path).
func BenchmarkPageCache(b *testing.B) {
	const (
		numPages = 30
		key      = "key"
	)

	cache := newPageCache()
	testPages := make(Pages, numPages)
	for i := range numPages {
		testPages[i] = &testPage{title: "p" + strconv.Itoa(i)}
	}

	for b.Loop() {
		cache.getP(key, nil, testPages)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_data.go | resources/page/page_data.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package page contains the core interfaces and types for the Page resource,
// a core component in Hugo.
package page
import (
"fmt"
)
// Data represents the .Data element in a Page in Hugo. We make this
// a type so we can do lazy loading of .Data.Pages
type Data map[string]any

// Pages returns the pages stored with key "pages". If the stored value is a
// func() Pages it is invoked (lazy loading); a missing key yields nil.
// Any other value type is a programmer error and panics.
func (d Data) Pages() Pages {
	v, ok := d["pages"]
	if !ok {
		return nil
	}

	if pages, isPages := v.(Pages); isPages {
		return pages
	}
	if fn, isFunc := v.(func() Pages); isFunc {
		return fn()
	}
	panic(fmt.Sprintf("%T is not Pages", v))
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_language_merge.go | resources/page/pages_language_merge.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
)
// Compile-time check that *Pages satisfies pagesLanguageMerger.
var _ pagesLanguageMerger = (*Pages)(nil)

// pagesLanguageMerger is implemented by page lists that can fill in
// missing translations from another list.
type pagesLanguageMerger interface {
	MergeByLanguage(other Pages) Pages
	// Needed for integration with the tpl package.
	MergeByLanguageInterface(other any) (any, error)
}
// MergeByLanguage supplies missing translations in p1 with values from p2.
// The result is sorted by the default sort order for pages.
func (p1 Pages) MergeByLanguage(p2 Pages) Pages {
	merge := func(pages *Pages) {
		// Collect the translation keys already present so only pages
		// missing from *pages are appended from p2.
		m := make(map[string]bool)
		for _, p := range *pages {
			m[p.TranslationKey()] = true
		}

		for _, p := range p2 {
			if _, found := m[p.TranslationKey()]; !found {
				*pages = append(*pages, p)
			}
		}

		SortByDefault(*pages)
	}

	// Memoized via the package page cache spc; the was-cached flag is not
	// needed here. The merge func runs (at most) on the cached copy.
	out, _ := spc.getP("pages.MergeByLanguage", merge, p1, p2)

	return out
}
// MergeByLanguageInterface is the generic version of MergeByLanguage. It
// is here just so it can be called from the tpl package.
// This is for internal use.
func (p1 Pages) MergeByLanguageInterface(in any) (any, error) {
	switch other := in.(type) {
	case nil:
		// Nothing to merge; return the receiver unchanged.
		return p1, nil
	case Pages:
		return p1.MergeByLanguage(other), nil
	default:
		return nil, fmt.Errorf("%T cannot be merged by language", in)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_related_test.go | resources/page/pages_related_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"testing"
"time"
"github.com/gohugoio/hugo/common/types"
qt "github.com/frankban/quicktest"
)
// TestRelated exercises Pages.Related with the four supported argument
// shapes: an options map with namedSlices, a Page document directly, an
// options map with document+indices, and namedSlices given as a
// []types.KeyValues literal. Results are expected newest-first and never
// include the query document itself.
func TestRelated(t *testing.T) {
	c := qt.New(t)
	t.Parallel()

	pages := Pages{
		&testPage{
			title:   "Page 1",
			pubDate: mustParseDate("2017-01-03"),
			params: map[string]any{
				"keywords": []string{"hugo", "says"},
			},
		},
		&testPage{
			title:   "Page 2",
			pubDate: mustParseDate("2017-01-02"),
			params: map[string]any{
				"keywords": []string{"hugo", "rocks"},
			},
		},
		&testPage{
			title:   "Page 3",
			pubDate: mustParseDate("2017-01-01"),
			params: map[string]any{
				"keywords": []string{"bep", "says"},
			},
		},
	}

	ctx := context.Background()

	// Keyword query via namedSlices helper.
	opts := map[string]any{
		"namedSlices": types.NewKeyValuesStrings("keywords", "hugo", "rocks"),
	}

	result, err := pages.Related(ctx, opts)
	c.Assert(err, qt.IsNil)
	c.Assert(len(result), qt.Equals, 2)
	c.Assert(result[0].Title(), qt.Equals, "Page 2")
	c.Assert(result[1].Title(), qt.Equals, "Page 1")

	// Query by document: Page 1 itself is excluded from the result.
	result, err = pages.Related(ctx, pages[0])
	c.Assert(err, qt.IsNil)
	c.Assert(len(result), qt.Equals, 2)
	c.Assert(result[0].Title(), qt.Equals, "Page 2")
	c.Assert(result[1].Title(), qt.Equals, "Page 3")

	// Document plus explicit index selection.
	opts = map[string]any{
		"document": pages[0],
		"indices":  []string{"keywords"},
	}
	result, err = pages.Related(ctx, opts)
	c.Assert(err, qt.IsNil)
	c.Assert(len(result), qt.Equals, 2)
	c.Assert(result[0].Title(), qt.Equals, "Page 2")
	c.Assert(result[1].Title(), qt.Equals, "Page 3")

	// namedSlices as an explicit []types.KeyValues literal.
	opts = map[string]any{
		"namedSlices": []types.KeyValues{
			{
				Key:    "keywords",
				Values: []any{"bep", "rocks"},
			},
		},
	}
	result, err = pages.Related(context.Background(), opts)
	c.Assert(err, qt.IsNil)
	c.Assert(len(result), qt.Equals, 2)
	c.Assert(result[0].Title(), qt.Equals, "Page 2")
	c.Assert(result[1].Title(), qt.Equals, "Page 3")
}
func mustParseDate(s string) time.Time {
d, err := time.Parse("2006-01-02", s)
if err != nil {
panic(err)
}
return d
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_markup_test.go | resources/page/page_markup_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"strings"
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/media"
)
// TestExtractSummaryFromHTML is a table test for ExtractSummaryFromHTML:
// word-count based splitting across markup flavors (Markdown, reST,
// AsciiDoc wrapper divs) and CJK counting mode, asserting both the summary
// and the remaining content.
func TestExtractSummaryFromHTML(t *testing.T) {
	c := qt.New(t)

	tests := []struct {
		mt                          media.Type
		input                       string
		isCJK                       bool
		numWords                    int
		expectSummary               string
		expectContentWithoutSummary string
	}{
		{media.Builtin.ReStructuredTextType, "<div class=\"document\">\n\n\n<p>Simple Page</p>\n</div>", false, 70, "<div class=\"document\">\n\n\n<p>Simple Page</p>\n</div>", ""},
		{media.Builtin.ReStructuredTextType, "<div class=\"document\"><p>First paragraph</p><p>Second paragraph</p></div>", false, 2, `<div class="document"><p>First paragraph</p></div>`, "<div class=\"document\"><p>Second paragraph</p></div>"},
		{media.Builtin.MarkdownType, "<p>First paragraph</p>", false, 10, "<p>First paragraph</p>", ""},
		{media.Builtin.MarkdownType, "<p>First paragraph</p><p>Second paragraph</p>", false, 2, "<p>First paragraph</p>", "<p>Second paragraph</p>"},
		{media.Builtin.MarkdownType, "<p>First paragraph</p><p>Second paragraph</p><p>Third paragraph</p>", false, 3, "<p>First paragraph</p><p>Second paragraph</p>", "<p>Third paragraph</p>"},
		{media.Builtin.AsciiDocType, "<div><p>First paragraph</p></div><div><p>Second paragraph</p></div>", false, 2, "<div><p>First paragraph</p></div>", "<div><p>Second paragraph</p></div>"},
		{media.Builtin.MarkdownType, "<p>这是中文,全中文</p><p>a这是中文,全中文</p>", true, 5, "<p>这是中文,全中文</p>", "<p>a这是中文,全中文</p>"},
	}

	for i, test := range tests {
		summary := ExtractSummaryFromHTML(test.mt, test.input, test.numWords, test.isCJK)
		c.Assert(summary.Summary(), qt.Equals, test.expectSummary, qt.Commentf("Summary %d", i))
		c.Assert(summary.ContentWithoutSummary(), qt.Equals, test.expectContentWithoutSummary, qt.Commentf("ContentWithoutSummary %d", i))
	}
}
// See https://discourse.gohugo.io/t/automatic-summarys-summarylength-seems-broken-in-the-case-of-plainify/51466/4
// Also issue 12837
//
// The extracted summary must extend past the markup-heavy image gallery at the
// top and include the following text paragraphs (asserted via HasSuffix below).
func TestExtractSummaryFromHTMLLotsOfHTMLInSummary(t *testing.T) {
	c := qt.New(t)
	input := `
<p>
<div>
<picture>
<img src="imgs/1.jpg" alt="1"/>
</picture>
<picture>
<img src="imgs/2.jpg" alt="2"/>
</picture>
<picture>
<img src="imgs/3.jpg" alt="3"/>
</picture>
<picture>
<img src="imgs/4.jpg" alt="4"/>
</picture>
<picture>
<img src="imgs/5.jpg" alt="5"/>
</picture>
</div>
</p>
<p>
This is a story about a cat.
</p>
<p>
The cat was white and fluffy.
</p>
<p>
And it liked milk.
</p>
`

	summary := ExtractSummaryFromHTML(media.Builtin.MarkdownType, input, 10, false)
	c.Assert(strings.HasSuffix(summary.Summary(), "<p>\nThis is a story about a cat.\n</p>\n<p>\nThe cat was white and fluffy.\n</p>"), qt.IsTrue)
}
// TestExtractSummaryFromHTMLWithDivider verifies manual summary splitting on
// an explicit divider marker ("FOOO" stands in for the rendered <!--more-->),
// checking Summary, ContentWithoutSummary and the reassembled Content.
func TestExtractSummaryFromHTMLWithDivider(t *testing.T) {
	c := qt.New(t)

	const divider = "FOOO"

	tests := []struct {
		mt                          media.Type
		input                       string
		expectSummary               string
		expectContentWithoutSummary string
		expectContent               string
	}{
		{media.Builtin.MarkdownType, "<p>First paragraph</p><p>FOOO</p><p>Second paragraph</p>", "<p>First paragraph</p>", "<p>Second paragraph</p>", "<p>First paragraph</p><p>Second paragraph</p>"},
		{media.Builtin.MarkdownType, "<p>First paragraph</p>\n<p>FOOO</p>\n<p>Second paragraph</p>", "<p>First paragraph</p>", "<p>Second paragraph</p>", "<p>First paragraph</p>\n<p>Second paragraph</p>"},
		// Divider before any content: empty summary.
		{media.Builtin.MarkdownType, "<p>FOOO</p>\n<p>First paragraph</p>", "", "<p>First paragraph</p>", "<p>First paragraph</p>"},
		// Divider embedded inside a paragraph.
		{media.Builtin.MarkdownType, "<p>First paragraph</p><p>Second paragraphFOOO</p><p>Third paragraph</p>", "<p>First paragraph</p><p>Second paragraph</p>", "<p>Third paragraph</p>", "<p>First paragraph</p><p>Second paragraph</p><p>Third paragraph</p>"},
		{media.Builtin.MarkdownType, "<p>这是中文,全中文FOOO</p><p>a这是中文,全中文</p>", "<p>这是中文,全中文</p>", "<p>a这是中文,全中文</p>", "<p>这是中文,全中文</p><p>a这是中文,全中文</p>"},
		{media.Builtin.MarkdownType, `<p>a <strong>b</strong>` + "\v" + ` c</p>` + "\n<p>FOOO</p>", "<p>a <strong>b</strong>\v c</p>", "", "<p>a <strong>b</strong>\v c</p>"},
		{media.Builtin.HTMLType, "<p>First paragraph</p>FOOO<p>Second paragraph</p>", "<p>First paragraph</p>", "<p>Second paragraph</p>", "<p>First paragraph</p><p>Second paragraph</p>"},
		// reStructuredText and AsciiDoc wrap everything in container divs.
		{media.Builtin.ReStructuredTextType, "<div class=\"document\">\n\n\n<p>This is summary.</p>\n<p>FOOO</p>\n<p>This is content.</p>\n</div>", "<div class=\"document\">\n\n\n<p>This is summary.</p>\n</div>", "<div class=\"document\"><p>This is content.</p>\n</div>", "<div class=\"document\">\n\n\n<p>This is summary.</p>\n<p>This is content.</p>\n</div>"},
		{media.Builtin.ReStructuredTextType, "<div class=\"document\"><p>First paragraphFOOO</p><p>Second paragraph</p></div>", "<div class=\"document\"><p>First paragraph</p></div>", "<div class=\"document\"><p>Second paragraph</p></div>", `<div class="document"><p>First paragraph</p><p>Second paragraph</p></div>`},
		{media.Builtin.AsciiDocType, "<div class=\"paragraph\"><p>Summary Next Line</p></div><div class=\"paragraph\"><p>FOOO</p></div><div class=\"paragraph\"><p>Some more text</p></div>", "<div class=\"paragraph\"><p>Summary Next Line</p></div>", "<div class=\"paragraph\"><p>Some more text</p></div>", "<div class=\"paragraph\"><p>Summary Next Line</p></div><div class=\"paragraph\"><p>Some more text</p></div>"},
		{media.Builtin.AsciiDocType, "<div class=\"paragraph\">\n<p>Summary Next Line</p>\n</div>\n<div class=\"paragraph\">\n<p>FOOO</p>\n</div>\n<div class=\"paragraph\">\n<p>Some more text</p>\n</div>\n", "<div class=\"paragraph\">\n<p>Summary Next Line</p>\n</div>", "<div class=\"paragraph\">\n<p>Some more text</p>\n</div>", "<div class=\"paragraph\">\n<p>Summary Next Line</p>\n</div>\n<div class=\"paragraph\">\n<p>Some more text</p>\n</div>"},
		{media.Builtin.AsciiDocType, "<div><p>FOOO</p></div><div><p>First paragraph</p></div>", "", "<div><p>First paragraph</p></div>", "<div><p>First paragraph</p></div>"},
		{media.Builtin.AsciiDocType, "<div><p>First paragraphFOOO</p></div><div><p>Second paragraph</p></div>", "<div><p>First paragraph</p></div>", "<div><p>Second paragraph</p></div>", "<div><p>First paragraph</p></div><div><p>Second paragraph</p></div>"},
	}

	for i, test := range tests {
		summary := ExtractSummaryFromHTMLWithDivider(test.mt, test.input, divider)
		c.Assert(summary.Summary(), qt.Equals, test.expectSummary, qt.Commentf("Summary %d", i))
		c.Assert(summary.ContentWithoutSummary(), qt.Equals, test.expectContentWithoutSummary, qt.Commentf("ContentWithoutSummary %d", i))
		c.Assert(summary.Content(), qt.Equals, test.expectContent, qt.Commentf("Content %d", i))
	}
}
// TestExpandDivider exercises expandSummaryDivider, which locates the summary
// divider within rendered HTML and returns the range to cut out, plus any
// trailing end markup to preserve.
func TestExpandDivider(t *testing.T) {
	c := qt.New(t)

	for i, test := range []struct {
		input           string
		divider         string
		ptag            tagReStartEnd
		expect          string
		expectEndMarkup string
	}{
		{"<p>First paragraph</p>\n<p>FOOO</p>\n<p>Second paragraph</p>", "FOOO", startEndP, "<p>FOOO</p>\n", ""},
		{"<div class=\"paragraph\">\n<p>FOOO</p>\n</div>", "FOOO", startEndDiv, "<div class=\"paragraph\">\n<p>FOOO</p>\n</div>", ""},
		{"<div><p>FOOO</p></div><div><p>Second paragraph</p></div>", "FOOO", startEndDiv, "<div><p>FOOO</p></div>", ""},
		{"<div><p>First paragraphFOOO</p></div><div><p>Second paragraph</p></div>", "FOOO", startEndDiv, "FOOO", "</p></div>"},
		{" <p> abc FOOO </p> ", "FOOO", startEndP, "FOOO", " </p>"},
		{" <p> FOOO </p> ", "FOOO", startEndP, "<p> FOOO </p>", ""},
		{" <p>\n \nFOOO </p> ", "FOOO", startEndP, "<p>\n \nFOOO </p>", ""},
		{" <div> FOOO </div> ", "FOOO", startEndDiv, "<div> FOOO </div>", ""},
	} {
		// Compute the divider's position once instead of calling
		// strings.Index twice per test case.
		low := strings.Index(test.input, test.divider)
		l := types.LowHigh[string]{Low: low, High: low + len(test.divider)}
		// Renamed from "e, t" so the second result no longer shadows the
		// *testing.T parameter t.
		expanded, end := expandSummaryDivider(test.input, test.ptag, l)
		c.Assert(test.input[expanded.Low:expanded.High], qt.Equals, test.expect, qt.Commentf("[%d] Test.expect %q", i, test.input))
		c.Assert(test.input[end.Low:end.High], qt.Equals, test.expectEndMarkup, qt.Commentf("[%d] Test.expectEndMarkup %q", i, test.input))
	}
}
// TestIsProbablyHTMLToken checks the heuristic that classifies a token as
// HTML markup vs. plain text.
func TestIsProbablyHTMLToken(t *testing.T) {
	c := qt.New(t)

	cases := []struct {
		input  string
		expect bool
	}{
		{"<p>", true},
		{"<p", true},
		{`width="32"`, true},
		{"width='32'", true},
		{"<p>Æøå", false},
	}

	for i, tc := range cases {
		c.Assert(isProbablyHTMLToken(tc.input), qt.Equals, tc.expect, qt.Commentf("[%d] Test.expect %q", i, tc.input))
	}
}
// BenchmarkSummaryFromHTML measures automatic summary extraction on a small
// two-paragraph document, verifying the result every iteration.
func BenchmarkSummaryFromHTML(b *testing.B) {
	const input = "<p>First paragraph</p><p>Second paragraph</p>"
	const (
		wantSummary = "<p>First paragraph</p>"
		wantRest    = "<p>Second paragraph</p>"
	)
	for b.Loop() {
		result := ExtractSummaryFromHTML(media.Builtin.MarkdownType, input, 2, false)
		if got := result.Content(); got != input {
			b.Fatalf("unexpected content: %q", got)
		}
		if got := result.ContentWithoutSummary(); got != wantRest {
			b.Fatalf("unexpected content without summary: %q", got)
		}
		if got := result.Summary(); got != wantSummary {
			b.Fatalf("unexpected summary: %q", got)
		}
	}
}
// BenchmarkSummaryFromHTMLWithDivider measures divider-based summary
// extraction, verifying the result every iteration.
func BenchmarkSummaryFromHTMLWithDivider(b *testing.B) {
	const input = "<p>First paragraph</p><p>FOOO</p><p>Second paragraph</p>"
	const (
		wantContent = "<p>First paragraph</p><p>Second paragraph</p>"
		wantSummary = "<p>First paragraph</p>"
		wantRest    = "<p>Second paragraph</p>"
	)
	for b.Loop() {
		result := ExtractSummaryFromHTMLWithDivider(media.Builtin.MarkdownType, input, "FOOO")
		if got := result.Content(); got != wantContent {
			b.Fatalf("unexpected content: %q", got)
		}
		if got := result.ContentWithoutSummary(); got != wantRest {
			b.Fatalf("unexpected content without summary: %q", got)
		}
		if got := result.Summary(); got != wantSummary {
			b.Fatalf("unexpected summary: %q", got)
		}
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/site_integration_test.go | resources/page/site_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// Issue 12513
// TestPageSiteSitesDefault verifies that .Site.Sites.Default resolves to the
// site of the default content language (here "de"), not simply the first
// site by weight.
func TestPageSiteSitesDefault(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
defaultContentLanguage = 'de'
defaultContentLanguageInSubdir = true
[languages.en]
languageName = 'English'
weight = 1
[languages.de]
languageName = 'Deutsch'
weight = 2
-- layouts/home.html --
{{ .Site.Sites.Default.Language.LanguageName }}
`

	b := hugolib.Test(t, files)
	b.AssertFileContent("public/de/index.html", "Deutsch")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/site.go | resources/page/site.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"time"
"github.com/gohugoio/hugo/common/hstore"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config/privacy"
"github.com/gohugoio/hugo/config/services"
"github.com/gohugoio/hugo/hugolib/roles"
"github.com/gohugoio/hugo/hugolib/versions"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/langs"
"github.com/gohugoio/hugo/navigation"
)
// Site represents a site. There can be multiple sites in a multilingual setup.
type Site interface {
	// Returns the Language configured for this Site.
	Language() *langs.Language

	// Returns the Site in one of dimensions language, version or role.
	Dimension(string) SiteDimension

	// Returns the role configured for this Site.
	Role() roles.Role

	// Returns the version configured for this Site.
	Version() versions.Version

	// Returns all the languages configured for all sites.
	Languages() langs.Languages

	// GetPage looks up a Page for the given ref(s).
	GetPage(ref ...string) (Page, error)

	// AllPages returns all pages for all languages.
	AllPages() Pages

	// Returns all the regular Pages in this Site.
	RegularPages() Pages

	// Returns all Pages in this Site.
	Pages() Pages

	// Returns all the top level sections.
	Sections() Pages

	// A shortcut to the home Page.
	Home() Page

	// Returns the server port.
	ServerPort() int

	// Returns the configured title for this Site.
	Title() string

	// Deprecated: Use .Language.LanguageCode instead.
	LanguageCode() string

	// Returns the configured copyright information for this Site.
	Copyright() string

	// Returns all Sites for all languages.
	Sites() Sites

	// Returns Site currently rendering.
	Current() Site

	// Returns a struct with some information about the build.
	Hugo() hugo.HugoInfo

	// Returns the BaseURL for this Site.
	BaseURL() string

	// Returns a taxonomy map.
	Taxonomies() TaxonomyList

	// Deprecated: Use .Lastmod instead.
	LastChange() time.Time

	// Returns the last modification date of the content.
	Lastmod() time.Time

	// Returns the Menus for this site.
	Menus() navigation.Menus

	// The main sections in the site.
	MainSections() []string

	// Returns the Params configured for this site.
	Params() maps.Params

	// Param is a convenience method to do lookups in Params.
	Param(key any) (any, error)

	// Returns a map of all the data inside /data.
	Data() map[string]any

	// Returns the site config.
	Config() SiteConfig

	// Deprecated: Use taxonomies instead.
	Author() map[string]any

	// Deprecated: Use taxonomies instead.
	Authors() AuthorList

	// Deprecated: Use .Site.Params instead.
	Social() map[string]string

	// BuildDrafts is deprecated and will be removed in a future release.
	BuildDrafts() bool

	// Deprecated: Use hugo.IsMultilingual instead.
	IsMultiLingual() bool

	// LanguagePrefix returns the language prefix for this site.
	LanguagePrefix() string

	hstore.StoreProvider

	// String returns a string representation of the site.
	// Note that this representation may change in the future.
	String() string

	// For internal use only.
	// This will panic if the site is not fully initialized.
	// This is typically used to inform the user in the content adapter templates,
	// as these are executed before all the page collections etc. are ready to use.
	CheckReady()
}
// SiteDimension represents a dimension of the site.
type SiteDimension interface {
	// Name returns the dimension's name.
	Name() string
}

// Sites represents an ordered list of sites (languages).
type Sites []Site
// Deprecated: Use .Sites.Default instead.
func (s Sites) First() Site {
	// Emits a deprecation warning, then delegates to Default.
	hugo.Deprecate(".Sites.First", "Use .Sites.Default instead.", "v0.127.0")
	return s.Default()
}
// Default is a convenience method to get the site corresponding to the default
// content language.
func (s Sites) Default() Site {
	for _, candidate := range s {
		if candidate.Language().IsDefault() {
			return candidate
		}
	}
	// No default-language site found; fall back to the first one, if any.
	if len(s) > 0 {
		return s[0]
	}
	return nil
}
// Some additional interfaces implemented by siteWrapper that's not on Site.
var _ identity.ForEeachIdentityByNameProvider = (*siteWrapper)(nil)

// siteWrapper wraps a Site and forwards every call to the wrapped value.
type siteWrapper struct {
	s Site
}

// WrapSite wraps s in a siteWrapper; it panics if s is nil.
func WrapSite(s Site) Site {
	if s == nil {
		panic("Site is nil")
	}
	return &siteWrapper{s: s}
}

// Key returns the language code of the wrapped site.
func (s *siteWrapper) Key() string {
	return s.s.Language().Lang
}

// Deprecated: Use .Site.Params instead.
func (s *siteWrapper) Social() map[string]string {
	return s.s.Social()
}

// Deprecated: Use taxonomies instead.
func (s *siteWrapper) Author() map[string]any {
	return s.s.Author()
}

// Deprecated: Use taxonomies instead.
func (s *siteWrapper) Authors() AuthorList {
	return s.s.Authors()
}

// The methods below simply delegate to the wrapped Site.

func (s *siteWrapper) GetPage(ref ...string) (Page, error) {
	return s.s.GetPage(ref...)
}

func (s *siteWrapper) Language() *langs.Language {
	return s.s.Language()
}

func (s *siteWrapper) Languages() langs.Languages {
	return s.s.Languages()
}

func (s *siteWrapper) Role() roles.Role {
	return s.s.Role()
}

func (s *siteWrapper) Dimension(d string) SiteDimension {
	return s.s.Dimension(d)
}

func (s *siteWrapper) Version() versions.Version {
	return s.s.Version()
}

func (s *siteWrapper) AllPages() Pages {
	return s.s.AllPages()
}

func (s *siteWrapper) RegularPages() Pages {
	return s.s.RegularPages()
}

func (s *siteWrapper) Pages() Pages {
	return s.s.Pages()
}

func (s *siteWrapper) Sections() Pages {
	return s.s.Sections()
}

func (s *siteWrapper) Home() Page {
	return s.s.Home()
}

func (s *siteWrapper) ServerPort() int {
	return s.s.ServerPort()
}

func (s *siteWrapper) Title() string {
	return s.s.Title()
}

func (s *siteWrapper) LanguageCode() string {
	return s.s.LanguageCode()
}

func (s *siteWrapper) Copyright() string {
	return s.s.Copyright()
}

func (s *siteWrapper) Sites() Sites {
	return s.s.Sites()
}

func (s *siteWrapper) Current() Site {
	return s.s.Current()
}

func (s *siteWrapper) Config() SiteConfig {
	return s.s.Config()
}

func (s *siteWrapper) Hugo() hugo.HugoInfo {
	return s.s.Hugo()
}

func (s *siteWrapper) BaseURL() string {
	return s.s.BaseURL()
}

func (s *siteWrapper) Taxonomies() TaxonomyList {
	return s.s.Taxonomies()
}

// Deprecated: Use .Site.Lastmod instead.
func (s *siteWrapper) LastChange() time.Time {
	return s.s.LastChange()
}

func (s *siteWrapper) Lastmod() time.Time {
	return s.s.Lastmod()
}

func (s *siteWrapper) Menus() navigation.Menus {
	return s.s.Menus()
}

func (s *siteWrapper) MainSections() []string {
	return s.s.MainSections()
}

func (s *siteWrapper) Params() maps.Params {
	return s.s.Params()
}

func (s *siteWrapper) Param(key any) (any, error) {
	return s.s.Param(key)
}

func (s *siteWrapper) Data() map[string]any {
	return s.s.Data()
}

func (s *siteWrapper) BuildDrafts() bool {
	return s.s.BuildDrafts()
}

// Deprecated: Use hugo.IsMultilingual instead.
func (s *siteWrapper) IsMultiLingual() bool {
	return s.s.IsMultiLingual()
}

func (s *siteWrapper) LanguagePrefix() string {
	return s.s.LanguagePrefix()
}

func (s *siteWrapper) Store() *hstore.Scratch {
	return s.s.Store()
}

func (s *siteWrapper) String() string {
	return s.s.String()
}

// For internal use only.
func (s *siteWrapper) ForEeachIdentityByName(name string, f func(identity.Identity) bool) {
	// Type assertion is safe: the compile-time check above guarantees
	// siteWrapper only wraps values implementing this interface in practice,
	// but note it will panic if the wrapped Site does not.
	s.s.(identity.ForEeachIdentityByNameProvider).ForEeachIdentityByName(name, f)
}

// For internal use only.
func (s *siteWrapper) CheckReady() {
	s.s.CheckReady()
}
// testSite is a minimal, mostly zero-valued Site implementation used by
// NewDummyHugoSite for testing.
type testSite struct {
	h hugo.HugoInfo
	l *langs.Language
}

// Deprecated: Use taxonomies instead.
func (s testSite) Author() map[string]any {
	return nil
}

// Deprecated: Use taxonomies instead.
func (s testSite) Authors() AuthorList {
	return AuthorList{}
}

// Deprecated: Use .Site.Params instead.
func (s testSite) Social() map[string]string {
	return make(map[string]string)
}

func (t testSite) Hugo() hugo.HugoInfo {
	return t.h
}

// ServerPort returns a fixed dummy port.
func (t testSite) ServerPort() int {
	return 1313
}

// Deprecated: Use .Site.Lastmod instead.
func (testSite) LastChange() (t time.Time) {
	return
}

func (testSite) Lastmod() (t time.Time) {
	return
}

// Title returns a fixed dummy title.
func (t testSite) Title() string {
	return "foo"
}

func (t testSite) LanguageCode() string {
	return t.l.Lang
}

func (t testSite) Copyright() string {
	return ""
}

// The collection/lookup methods below intentionally return zero values.

func (t testSite) Sites() Sites {
	return nil
}

func (t testSite) Sections() Pages {
	return nil
}

func (t testSite) GetPage(ref ...string) (Page, error) {
	return nil, nil
}

func (t testSite) Current() Site {
	return t
}

func (s testSite) LanguagePrefix() string {
	return ""
}

func (t testSite) Languages() langs.Languages {
	return nil
}

func (t testSite) Dimension(d string) SiteDimension {
	return nil
}

func (t testSite) MainSections() []string {
	return nil
}

func (t testSite) Language() *langs.Language {
	return t.l
}

func (t testSite) Role() roles.Role {
	return nil
}

func (t testSite) Version() versions.Version {
	return nil
}

func (t testSite) Home() Page {
	return nil
}

func (t testSite) Pages() Pages {
	return nil
}

func (t testSite) AllPages() Pages {
	return nil
}

func (t testSite) RegularPages() Pages {
	return nil
}

func (t testSite) Menus() navigation.Menus {
	return nil
}

func (t testSite) Taxonomies() TaxonomyList {
	return nil
}

func (t testSite) BaseURL() string {
	return ""
}

func (t testSite) Params() maps.Params {
	return nil
}

func (t testSite) Data() map[string]any {
	return nil
}

func (s testSite) Config() SiteConfig {
	return SiteConfig{}
}

func (s testSite) BuildDrafts() bool {
	return false
}

// Deprecated: Use hugo.IsMultilingual instead.
func (s testSite) IsMultiLingual() bool {
	return false
}

func (s testSite) Param(key any) (any, error) {
	return nil, nil
}

// Store returns a fresh scratch on every call.
func (s testSite) Store() *hstore.Scratch {
	return hstore.NewScratch()
}

func (s testSite) String() string {
	return "testSite"
}

// CheckReady is a no-op for the test site.
func (s testSite) CheckReady() {
}
// NewDummyHugoSite creates a new minimal test site.
func NewDummyHugoSite(conf config.AllProvider) Site {
	lang := &langs.Language{Lang: "en"}
	info := hugo.NewInfo(conf, nil)
	return testSite{h: info, l: lang}
}
// SiteConfig holds the config in site.Config.
type SiteConfig struct {
	// Privacy contains all privacy related settings that can be used to
	// make the YouTube template etc. GDPR compliant.
	Privacy privacy.Config

	// Services contains config for services such as Google Analytics etc.
	Services services.Config
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_data_test.go | resources/page/page_data_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"bytes"
"testing"
"text/template"
qt "github.com/frankban/quicktest"
)
// TestPageData verifies the Data map helper: .Pages resolves both a direct
// Pages value and a func() Pages, and the value renders via text/template.
func TestPageData(t *testing.T) {
	c := qt.New(t)

	data := make(Data)

	// No "pages" key set yet.
	c.Assert(data.Pages(), qt.IsNil)

	pages := Pages{
		&testPage{title: "a1"},
		&testPage{title: "a2"},
	}

	data["pages"] = pages

	c.Assert(data.Pages(), eq, pages)

	// The value may also be a lazily evaluated func.
	data["pages"] = func() Pages {
		return pages
	}

	c.Assert(data.Pages(), eq, pages)

	templ, err := template.New("").Parse(`Pages: {{ .Pages }}`)

	c.Assert(err, qt.IsNil)

	var buff bytes.Buffer

	c.Assert(templ.Execute(&buff, data), qt.IsNil)

	c.Assert(buff.String(), qt.Contains, "Pages(2)")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/permalinks.go | resources/page/permalinks.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"errors"
"fmt"
"os"
"path"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
"github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/resources/kinds"
)
// PermalinkExpander holds permalink mappings per section.
type PermalinkExpander struct {
	// knownPermalinkAttributes maps :tags in a permalink specification to a
	// function which, given a page and the tag, returns the resulting string
	// to be used to replace that tag.
	knownPermalinkAttributes map[string]pageToPermaAttribute

	// expanders maps page kind -> pattern key (section) -> expander func.
	expanders map[string]map[string]func(Page) (string, error)

	// urlize makes a string URL-safe; injected via NewPermalinkExpander.
	urlize func(uri string) string

	// patternCache caches compiled expander funcs keyed by pattern string.
	patternCache *maps.Cache[string, func(Page) (string, error)]
}

// Time for checking date formats. Every field is different than the
// Go reference time for date formatting. This ensures that formatting this date
// with a Go time format always has a different output than the format itself.
var referenceTime = time.Date(2019, time.November, 9, 23, 1, 42, 1, time.UTC)
// Return the callback for the given permalink attribute and a boolean indicating if the attribute is valid or not.
func (p PermalinkExpander) callback(attr string) (pageToPermaAttribute, bool) {
	if callback, ok := p.knownPermalinkAttributes[attr]; ok {
		return callback, true
	}

	// :sections with an explicit slice spec, e.g. sections[1:].
	if strings.HasPrefix(attr, "sections[") {
		fn := p.toSliceFunc(strings.TrimPrefix(attr, "sections"))
		return func(p Page, s string) (string, error) {
			return path.Join(fn(p.CurrentSection().SectionsEntries())...), nil
		}, true
	}

	// :sectionslugs with an explicit slice spec; slices the slug-based
	// section path the same way.
	if strings.HasPrefix(attr, "sectionslugs[") {
		fn := p.toSliceFunc(strings.TrimPrefix(attr, "sectionslugs"))
		sectionSlugsFunc := p.withSectionPagesFunc(p.pageToPermalinkSlugElseTitle, func(s ...string) string {
			return path.Join(fn(s)...)
		})
		return sectionSlugsFunc, true
	}

	// Make sure this comes after all the other checks.
	// Any attribute that formats differently from itself via referenceTime
	// is treated as a Go date layout.
	if referenceTime.Format(attr) != attr {
		return p.pageToPermalinkDate, true
	}

	return nil, false
}
// NewPermalinkExpander creates a new PermalinkExpander configured by the given
// urlize func.
func NewPermalinkExpander(urlize func(uri string) string, patterns map[string]map[string]string) (PermalinkExpander, error) {
	p := PermalinkExpander{
		urlize:       urlize,
		patternCache: maps.NewCache[string, func(Page) (string, error)](),
	}

	// The built-in :attributes understood in permalink patterns.
	p.knownPermalinkAttributes = map[string]pageToPermaAttribute{
		"year":                  p.pageToPermalinkDate,
		"month":                 p.pageToPermalinkDate,
		"monthname":             p.pageToPermalinkDate,
		"day":                   p.pageToPermalinkDate,
		"weekday":               p.pageToPermalinkDate,
		"weekdayname":           p.pageToPermalinkDate,
		"yearday":               p.pageToPermalinkDate,
		"section":               p.pageToPermalinkSection,
		"sectionslug":           p.pageToPermalinkSectionSlug,
		"sections":              p.pageToPermalinkSections,
		"sectionslugs":          p.pageToPermalinkSectionSlugs,
		"title":                 p.pageToPermalinkTitle,
		"slug":                  p.pageToPermalinkSlugElseTitle,
		"slugorfilename":        p.pageToPermalinkSlugElseFilename,
		"filename":              p.pageToPermalinkFilename,
		"contentbasename":       p.pageToPermalinkContentBaseName,
		"slugorcontentbasename": p.pageToPermalinkSlugOrContentBaseName,
	}

	p.expanders = make(map[string]map[string]func(Page) (string, error))

	// Compile one expander map per page kind.
	for kind, patterns := range patterns {
		e, err := p.parse(patterns)
		if err != nil {
			return p, err
		}
		p.expanders[kind] = e
	}

	return p, nil
}
// Escape sequence for colons in permalink patterns.
const escapePlaceholderColon = "\x00"

// normalizeEscapeSequencesIn replaces escaped colons ("\:") with a placeholder
// so they survive attribute expansion; the bool reports whether any
// replacement was made.
func (l PermalinkExpander) normalizeEscapeSequencesIn(s string) (string, bool) {
	s2 := strings.ReplaceAll(s, "\\:", escapePlaceholderColon)
	return s2, s2 != s
}

// normalizeEscapeSequencesOut restores placeholders back to literal colons.
func (l PermalinkExpander) normalizeEscapeSequencesOut(result string) string {
	return strings.ReplaceAll(result, escapePlaceholderColon, ":")
}
// ExpandPattern expands the path in p with the specified expand pattern.
// The compiled pattern is cached, so repeated calls with the same pattern
// are cheap.
func (l PermalinkExpander) ExpandPattern(pattern string, p Page) (string, error) {
	expand, err := l.getOrParsePattern(pattern)
	if err != nil {
		return "", err
	}

	return expand(p)
}
// Expand expands the path in p according to the rules defined for the given key.
// If no rules are found for the given key, an empty string is returned.
func (l PermalinkExpander) Expand(key string, p Page) (string, error) {
	if byKey, ok := l.expanders[p.Kind()]; ok {
		if fn, ok := byKey[key]; ok {
			return fn(p)
		}
	}
	return "", nil
}
// Allow " " and / to represent the root section.
var sectionCutSet = " /"

func init() {
	// When the OS path separator is not "/" (i.e. on Windows), treat it
	// as a root marker too.
	if string(os.PathSeparator) != "/" {
		sectionCutSet += string(os.PathSeparator)
	}
}
// getOrParsePattern compiles pattern into an expander func, caching the
// result per pattern string.
func (l PermalinkExpander) getOrParsePattern(pattern string) (func(Page) (string, error), error) {
	return l.patternCache.GetOrCreate(pattern, func() (func(Page) (string, error), error) {
		var normalized bool
		pattern, normalized = l.normalizeEscapeSequencesIn(pattern)

		matches := attributeRegexp.FindAllStringSubmatch(pattern, -1)
		if matches == nil {
			// Static pattern with no :attributes; return it as-is.
			result := pattern
			if normalized {
				result = l.normalizeEscapeSequencesOut(result)
			}
			return func(p Page) (string, error) {
				return result, nil
			}, nil
		}

		// Resolve the callback for every :attribute up front so unknown
		// attributes fail at parse time, not at expand time.
		callbacks := make([]pageToPermaAttribute, len(matches))
		replacements := make([]string, len(matches))
		for i, m := range matches {
			replacement := m[0]
			attr := replacement[1:]
			replacements[i] = replacement
			callback, ok := l.callback(attr)
			if !ok {
				return nil, &permalinkExpandError{pattern: pattern, err: errPermalinkAttributeUnknown}
			}
			callbacks[i] = callback
		}

		return func(p Page) (string, error) {
			newField := pattern
			for i, replacement := range replacements {
				attr := replacement[1:]
				callback := callbacks[i]
				newAttr, err := callback(p, attr)
				if err != nil {
					return "", &permalinkExpandError{pattern: pattern, err: err}
				}
				// Replace only the first occurrence; later duplicates are
				// handled by their own entries in replacements.
				newField = strings.Replace(newField, replacement, newAttr, 1)
			}

			if normalized {
				newField = l.normalizeEscapeSequencesOut(newField)
			}

			return newField, nil
		}, nil
	})
}
// parse compiles a section -> pattern map into a map of expander funcs,
// trimming root-section markers from the keys.
func (l PermalinkExpander) parse(patterns map[string]string) (map[string]func(Page) (string, error), error) {
	result := make(map[string]func(Page) (string, error), len(patterns))

	for key, pattern := range patterns {
		fn, err := l.getOrParsePattern(pattern)
		if err != nil {
			return nil, err
		}
		result[strings.Trim(key, sectionCutSet)] = fn
	}

	return result, nil
}
// pageToPermaAttribute is the type of a function which, given a page and a tag
// can return a string to go in that position in the page (or an error)
type pageToPermaAttribute func(Page, string) (string, error)

// attributeRegexp matches a :attribute, optionally followed by a slice spec
// such as [1:2].
var attributeRegexp = regexp.MustCompile(`:\w+(\[.+?\])?`)

// permalinkExpandError annotates an expansion error with the pattern that
// caused it.
type permalinkExpandError struct {
	pattern string
	err     error
}

func (pee *permalinkExpandError) Error() string {
	return fmt.Sprintf("error expanding %q: %s", pee.pattern, pee.err)
}

var errPermalinkAttributeUnknown = errors.New("permalink attribute not recognised")
// pageToPermalinkDate resolves a date-based permalink attribute (e.g. :year,
// :month) from the page's Date. Any other field name is treated as a Go time
// layout and formatted with the page's date.
func (l PermalinkExpander) pageToPermalinkDate(p Page, dateField string) (string, error) {
	d := p.Date()
	switch dateField {
	case "year":
		return strconv.Itoa(d.Year()), nil
	case "month":
		return fmt.Sprintf("%02d", int(d.Month())), nil
	case "monthname":
		return d.Month().String(), nil
	case "day":
		return fmt.Sprintf("%02d", d.Day()), nil
	case "weekday":
		return strconv.Itoa(int(d.Weekday())), nil
	case "weekdayname":
		return d.Weekday().String(), nil
	case "yearday":
		return strconv.Itoa(d.YearDay()), nil
	default:
		return d.Format(dateField), nil
	}
}
// pageToPermalinkTitle returns the URL-safe form of the title
func (l PermalinkExpander) pageToPermalinkTitle(p Page, _ string) (string, error) {
	return l.urlize(p.Title()), nil
}
// pageToPermalinkFilename returns the URL-safe form of the filename
func (l PermalinkExpander) pageToPermalinkFilename(p Page, _ string) (string, error) {
	name := l.translationBaseName(p)
	switch name {
	case "_index":
		// Branch bundles have no usable filename.
		return "", nil
	case "index":
		// Page bundles; the directory name will hopefully have a better name.
		dir := strings.TrimSuffix(p.File().Dir(), helpers.FilePathSeparator)
		_, name = filepath.Split(dir)
	}
	return l.urlize(name), nil
}
// if the page has a slug, return the slug, else return the title
func (l PermalinkExpander) pageToPermalinkSlugElseTitle(p Page, a string) (string, error) {
	if p.Slug() != "" {
		return l.urlize(p.Slug()), nil
	}
	return l.pageToPermalinkTitle(p, a)
}

// if the page has a slug, return the slug, else return the filename
func (l PermalinkExpander) pageToPermalinkSlugElseFilename(p Page, a string) (string, error) {
	if p.Slug() != "" {
		return l.urlize(p.Slug()), nil
	}
	return l.pageToPermalinkFilename(p, a)
}

// pageToPermalinkSection returns the page's section.
func (l PermalinkExpander) pageToPermalinkSection(p Page, _ string) (string, error) {
	return p.Section(), nil
}

// pageToPermalinkSectionSlug returns the URL-safe form of the first section's slug or title
func (l PermalinkExpander) pageToPermalinkSectionSlug(p Page, attr string) (string, error) {
	sectionPage := p.FirstSection()
	// The home page contributes no section segment.
	if sectionPage == nil || sectionPage.IsHome() {
		return "", nil
	}
	return l.pageToPermalinkSlugElseTitle(sectionPage, attr)
}

// pageToPermalinkSections returns the sections path of the page's current section.
func (l PermalinkExpander) pageToPermalinkSections(p Page, _ string) (string, error) {
	return p.CurrentSection().SectionsPath(), nil
}

// pageToPermalinkSectionSlugs returns a path built from all ancestor sections using their slugs or titles
func (l PermalinkExpander) pageToPermalinkSectionSlugs(p Page, attr string) (string, error) {
	sectionSlugsFunc := l.withSectionPagesFunc(l.pageToPermalinkSlugElseTitle, path.Join)
	return sectionSlugsFunc(p, attr)
}

// pageToPermalinkContentBaseName returns the URL-safe form of the content base name.
func (l PermalinkExpander) pageToPermalinkContentBaseName(p Page, _ string) (string, error) {
	return l.urlize(p.PathInfo().Unnormalized().BaseNameNoIdentifier()), nil
}
// pageToPermalinkSlugOrContentBaseName returns the URL-safe form of the slug,
// falling back to the content base name when no slug is set. Any error from
// the fallback is propagated (the previous version returned a nil error even
// on failure).
func (l PermalinkExpander) pageToPermalinkSlugOrContentBaseName(p Page, a string) (string, error) {
	if p.Slug() != "" {
		return l.urlize(p.Slug()), nil
	}
	return l.pageToPermalinkContentBaseName(p, a)
}
// translationBaseName returns the page file's translation base name,
// or the empty string for pages without a backing file.
func (l PermalinkExpander) translationBaseName(p Page) string {
	f := p.File()
	if f == nil {
		return ""
	}
	return f.TranslationBaseName()
}
// withSectionPagesFunc returns a function that builds permalink attributes
// from section pages: it applies f to every ancestor section (root first)
// plus the current section, skipping home, and joins the results with join.
//
// Current use is to create section-based hierarchical paths using section slugs.
func (l PermalinkExpander) withSectionPagesFunc(f func(Page, string) (string, error), join func(...string) string) func(p Page, s string) (string, error) {
	return func(p Page, s string) (string, error) {
		current := p.CurrentSection()
		// Root-first ancestors followed by the current section itself.
		hierarchy := current.Ancestors().Reverse()
		hierarchy = append(hierarchy, current)
		var parts []string
		for _, sp := range hierarchy {
			// The home page contributes nothing to the path.
			if sp.IsHome() {
				continue
			}
			v, err := f(sp, s)
			if err != nil {
				return "", err
			}
			parts = append(parts, v)
		}
		return join(parts...), nil
	}
}
var (
	// nilSliceFunc ignores its input and always selects nothing.
	nilSliceFunc = func(s []string) []string {
		return nil
	}
	// allSliceFunc returns its input unmodified (the "no cut spec" case).
	allSliceFunc = func(s []string) []string {
		return s
	}
)
// toSliceFunc returns a slice func that slices s according to the cut spec.
// The cut spec must be on form [low:high] (one or both can be omitted),
// also allowing single slice indices (e.g. [2]) and the special [last] keyword
// giving the last element of the slice.
// The returned function will be lenient and not panic in out of bounds situation.
//
// The current use case for this is to use parts of the sections path in permalinks.
func (l PermalinkExpander) toSliceFunc(cut string) func(s []string) []string {
	cut = strings.ToLower(strings.TrimSpace(cut))
	if cut == "" {
		// No spec: keep the whole slice.
		return allSliceFunc
	}
	// Anything not wrapped in brackets (minimum "[x]") is invalid: select nothing.
	if len(cut) < 3 || (cut[0] != '[' || cut[len(cut)-1] != ']') {
		return nilSliceFunc
	}
	// toNFunc resolves one bound of the spec into a concrete index for a
	// given slice. low selects low-bound semantics: default index 0 and
	// the out-of-range marker -1 (the high bound defaults to len(ss)).
	toNFunc := func(s string, low bool) func(ss []string) int {
		if s == "" {
			if low {
				return func(ss []string) int {
					return 0
				}
			} else {
				return func(ss []string) int {
					return len(ss)
				}
			}
		}
		if s == "last" {
			return func(ss []string) int {
				return len(ss) - 1
			}
		}
		// Non-numeric input parses as 0 (error deliberately ignored);
		// negative indices are clamped to 0.
		n, _ := strconv.Atoi(s)
		if n < 0 {
			n = 0
		}
		return func(ss []string) int {
			// Prevent out of bound situations. It would not make
			// much sense to panic here.
			if n >= len(ss) {
				if low {
					return -1
				}
				return len(ss)
			}
			return n
		}
	}
	opsStr := cut[1 : len(cut)-1]
	opts := strings.Split(opsStr, ":")
	if !strings.Contains(opsStr, ":") {
		// Single-index form, e.g. [2] or [last]: select one element.
		toN := toNFunc(opts[0], true)
		return func(s []string) []string {
			if len(s) == 0 {
				return nil
			}
			n := toN(s)
			if n < 0 {
				return []string{}
			}
			v := s[n]
			if v == "" {
				return nil
			}
			return []string{v}
		}
	}
	// Range form [low:high].
	toN1, toN2 := toNFunc(opts[0], true), toNFunc(opts[1], false)
	return func(s []string) []string {
		if len(s) == 0 {
			return nil
		}
		n1, n2 := toN1(s), toN2(s)
		if n1 < 0 || n2 < 0 {
			return []string{}
		}
		return s[n1:n2]
	}
}
// permalinksKindsSupport lists the page kinds that may carry their own
// permalinks configuration section.
var permalinksKindsSupport = []string{kinds.KindPage, kinds.KindSection, kinds.KindTaxonomy, kinds.KindTerm}
// DecodePermalinksConfig decodes the permalinks configuration in the given map
func DecodePermalinksConfig(m map[string]any) (map[string]map[string]string, error) {
permalinksConfig := make(map[string]map[string]string)
permalinksConfig[kinds.KindPage] = make(map[string]string)
permalinksConfig[kinds.KindSection] = make(map[string]string)
permalinksConfig[kinds.KindTaxonomy] = make(map[string]string)
permalinksConfig[kinds.KindTerm] = make(map[string]string)
config := maps.CleanConfigStringMap(m)
for k, v := range config {
switch v := v.(type) {
case string:
// [permalinks]
// key = '...'
// To successfully be backward compatible, "default" patterns need to be set for both page and term
permalinksConfig[kinds.KindPage][k] = v
permalinksConfig[kinds.KindTerm][k] = v
case maps.Params:
// [permalinks.key]
// xyz = ???
if hstrings.InSlice(permalinksKindsSupport, k) {
// TODO: warn if we overwrite an already set value
for k2, v2 := range v {
switch v2 := v2.(type) {
case string:
permalinksConfig[k][k2] = v2
default:
return nil, fmt.Errorf("permalinks configuration invalid: unknown value %q for key %q for kind %q", v2, k2, k)
}
}
} else {
return nil, fmt.Errorf("permalinks configuration not supported for kind %q, supported kinds are %v", k, permalinksKindsSupport)
}
default:
return nil, fmt.Errorf("permalinks configuration invalid: unknown value %q for key %q", v, k)
}
}
return permalinksConfig, nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/path_integration_test.go | resources/page/path_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"strings"
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// Issue 4926
// Issue 8232
// Issue 12342
// TestHashSignInPermalink builds a site with '#' in a filename, a slug-less
// title and taxonomy terms, and asserts that the on-disk paths keep the
// literal '#' while the rendered links percent-encode it as %23.
func TestHashSignInPermalink(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ['section','rss','sitemap','taxonomy']
[permalinks]
s1 = '/:section/:slug'
-- layouts/list.html --
{{ range site.Pages }}{{ .RelPermalink }}|{{ end }}
-- layouts/single.html --
{{ .Title }}
-- content/s1/p1.md --
---
title: p#1
tags: test#tag#
---
-- content/s2/p#2.md --
---
title: p#2
---
`
	b := hugolib.Test(t, files)
	// Output files keep the raw '#'.
	b.AssertFileExists("public/s1/p#1/index.html", true)
	b.AssertFileExists("public/s2/p#2/index.html", true)
	b.AssertFileExists("public/tags/test#tag#/index.html", true)
	// Rendered permalinks escape '#' to %23.
	b.AssertFileContentExact("public/index.html", "/|/s1/p%231/|/s2/p%232/|/tags/test%23tag%23/|")
}
// Issues: 13829, 4428, 7497.
// TestMiscPathIssues renders the same site with uglyURLs disabled and enabled
// and, for every page kind in both the html and print output formats, checks
// the title, the alternative output format links (AOFRP) and the selected
// template name.
func TestMiscPathIssues(t *testing.T) {
	t.Parallel()
	filesTemplate := `
-- hugo.toml --
uglyURLs = false
[outputFormats.print]
isPlainText = true
mediaType = 'text/plain'
path = 'print'
[outputs]
home = ['html','print']
page = ['html','print']
section = ['html','print']
taxonomy = ['html','print']
term = ['html','print']
[taxonomies]
tag = 'tags'
-- content/_index.md --
---
title: home
---
-- content/s1/_index.md --
---
title: s1
---
-- content/s1/p1.md --
---
title: p1
tags: ['red']
---
-- content/tags/_index.md --
---
title: tags
---
-- content/tags/red/_index.md --
---
title: red
---
`
	// One template per kind and output format, all sharing the same body.
	templates := []string{
		"layouts/home.html",
		"layouts/home.print.txt",
		"layouts/page.html",
		"layouts/page.print.txt",
		"layouts/section.html",
		"layouts/section.print.txt",
		"layouts/taxonomy.html",
		"layouts/taxonomy.print.txt",
		"layouts/term.html",
		"layouts/term.print.txt",
	}
	const code string = "TITLE: {{ .Title }} | AOFRP: {{ range .AlternativeOutputFormats }}{{ .RelPermalink }}{{ end }} | TEMPLATE: {{ templates.Current.Name }}"
	// Append the templates to the txtar archive.
	for _, template := range templates {
		filesTemplate += "-- " + template + " --\n" + code + "\n"
	}
	files := filesTemplate
	b := hugolib.Test(t, files)
	// uglyURLs: false, outputFormat: html
	b.AssertFileContent("public/index.html", "TITLE: home | AOFRP: /print/index.txt | TEMPLATE: home.html")
	b.AssertFileContent("public/s1/index.html", "TITLE: s1 | AOFRP: /print/s1/index.txt | TEMPLATE: section.html")
	b.AssertFileContent("public/s1/p1/index.html", "TITLE: p1 | AOFRP: /print/s1/p1/index.txt | TEMPLATE: page.html")
	b.AssertFileContent("public/tags/index.html", "TITLE: tags | AOFRP: /print/tags/index.txt | TEMPLATE: taxonomy.html")
	b.AssertFileContent("public/tags/red/index.html", "TITLE: red | AOFRP: /print/tags/red/index.txt | TEMPLATE: term.html")
	// uglyURLs: false, outputFormat: print
	b.AssertFileContent("public/print/index.txt", "TITLE: home | AOFRP: / | TEMPLATE: home.print.txt")
	b.AssertFileContent("public/print/s1/index.txt", "TITLE: s1 | AOFRP: /s1/ | TEMPLATE: section.print.txt")
	b.AssertFileContent("public/print/s1/p1.txt", "TITLE: p1 | AOFRP: /s1/p1/ | TEMPLATE: page.print.txt")
	b.AssertFileContent("public/print/tags/index.txt", "TITLE: tags | AOFRP: /tags/ | TEMPLATE: taxonomy.print.txt")
	b.AssertFileContent("public/print/tags/red.txt", "TITLE: red | AOFRP: /tags/red/ | TEMPLATE: term.print.txt")
	// Rebuild with uglyURLs enabled.
	files = strings.ReplaceAll(filesTemplate, "uglyURLs = false", "uglyURLs = true")
	b = hugolib.Test(t, files)
	// uglyURLs: true, outputFormat: html
	b.AssertFileContent("public/index.html", "TITLE: home | AOFRP: /print/index.txt | TEMPLATE: home.html")
	b.AssertFileContent("public/s1/index.html", "TITLE: s1 | AOFRP: /print/s1/index.txt | TEMPLATE: section.html")
	b.AssertFileContent("public/s1/p1.html", "TITLE: p1 | AOFRP: /print/s1/p1.txt | TEMPLATE: page.html")
	b.AssertFileContent("public/tags/index.html", "TITLE: tags | AOFRP: /print/tags/index.txt | TEMPLATE: taxonomy.html")
	b.AssertFileContent("public/tags/red.html", "TITLE: red | AOFRP: /print/tags/red.txt | TEMPLATE: term.html")
	// uglyURLs: true, outputFormat: print
	b.AssertFileContent("public/print/index.txt", "TITLE: home | AOFRP: /index.html | TEMPLATE: home.print.txt")
	b.AssertFileContent("public/print/s1/index.txt", "TITLE: s1 | AOFRP: /s1/index.html | TEMPLATE: section.print.txt")
	b.AssertFileContent("public/print/s1/p1.txt", "TITLE: p1 | AOFRP: /s1/p1.html | TEMPLATE: page.print.txt")
	b.AssertFileContent("public/print/tags/index.txt", "TITLE: tags | AOFRP: /tags/index.html | TEMPLATE: taxonomy.print.txt")
	b.AssertFileContent("public/print/tags/red.txt", "TITLE: red | AOFRP: /tags/red.html | TEMPLATE: term.print.txt")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_integration_test.go | resources/page/page_integration_test.go | // Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestGroupByLocalizedDate groups pages by month in a multilingual site and
// asserts that the group keys use the month names of each language
// (English vs. French output).
func TestGroupByLocalizedDate(t *testing.T) {
	files := `
-- hugo.toml --
defaultContentLanguage = 'en'
defaultContentLanguageInSubdir = true
[languages]
[languages.en]
title = 'My blog'
weight = 1
[languages.fr]
title = 'Mon blogue'
weight = 2
[languages.nn]
title = 'Bloggen min'
weight = 3
-- content/p1.md --
---
title: "Post 1"
date: "2020-01-01"
---
-- content/p2.md --
---
title: "Post 2"
date: "2020-02-01"
---
-- content/p1.fr.md --
---
title: "Post 1"
date: "2020-01-01"
---
-- content/p2.fr.md --
---
title: "Post 2"
date: "2020-02-01"
---
-- layouts/home.html --
{{ range $k, $v := site.RegularPages.GroupByDate "January, 2006" }}{{ $k }}|{{ $v.Key }}|{{ $v.Pages }}{{ end }}
`
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			NeedsOsFS:   true,
		}).Build()
	// English month names in the en site, French in the fr site.
	b.AssertFileContent("public/en/index.html", "0|February, 2020|Pages(1)1|January, 2020|Pages(1)")
	b.AssertFileContent("public/fr/index.html", "0|février, 2020|Pages(1)1|janvier, 2020|Pages(1)")
}
// TestPagesSortCollation asserts that sorting and grouping of pages, params
// and taxonomy terms uses language-aware collation (accented characters such
// as 'é' are ordered with their base letter, not after 'z').
func TestPagesSortCollation(t *testing.T) {
	files := `
-- hugo.toml --
defaultContentLanguage = 'en'
defaultContentLanguageInSubdir = true
[languages]
[languages.en]
title = 'My blog'
weight = 1
[languages.fr]
title = 'Mon blogue'
weight = 2
[languages.nn]
title = 'Bloggen min'
weight = 3
-- content/p1.md --
---
title: "zulu"
date: "2020-01-01"
param1: "xylophone"
tags: ["xylophone", "éclair", "zulu", "emma"]
---
-- content/p2.md --
---
title: "émotion"
date: "2020-01-01"
param1: "violin"
---
-- content/p3.md --
---
title: "alpha"
date: "2020-01-01"
param1: "éclair"
---
-- layouts/home.html --
ByTitle: {{ range site.RegularPages.ByTitle }}{{ .Title }}|{{ end }}
ByLinkTitle: {{ range site.RegularPages.ByLinkTitle }}{{ .Title }}|{{ end }}
ByParam: {{ range site.RegularPages.ByParam "param1" }}{{ .Params.param1 }}|{{ end }}
Tags Alphabetical: {{ range site.Taxonomies.tags.Alphabetical }}{{ .Term }}|{{ end }}
GroupBy: {{ range site.RegularPages.GroupBy "Title" }}{{ .Key }}|{{ end }}
{{ with (site.GetPage "p1").Params.tags }}
Sort: {{ sort . }}
ByWeight: {{ range site.RegularPages.ByWeight }}{{ .Title }}|{{ end }}
{{ end }}
`
	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{
			T:           t,
			TxtarString: files,
			NeedsOsFS:   true,
		}).Build()
	b.AssertFileContent("public/en/index.html", `
ByTitle: alpha|émotion|zulu|
ByLinkTitle: alpha|émotion|zulu|
ByParam: éclair|violin|xylophone
Tags Alphabetical: éclair|emma|xylophone|zulu|
GroupBy: alpha|émotion|zulu|
Sort: [éclair emma xylophone zulu]
ByWeight: alpha|émotion|zulu|
`)
}
// See #10377
// TestPermalinkExpansionSectionsRepeated uses the :sections token with both an
// index ([1]) and the [last] keyword in one pattern and asserts the expanded
// permalink picks the expected section segments.
func TestPermalinkExpansionSectionsRepeated(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ["home", "taxonomy", "taxonomyTerm", "sitemap"]
[outputs]
home = ["HTML"]
page = ["HTML"]
section = ["HTML"]
[outputFormats]
[permalinks]
posts = '/:sections[1]/:sections[last]/:slug'
-- content/posts/_index.md --
-- content/posts/a/_index.md --
-- content/posts/a/b/_index.md --
-- content/posts/a/b/c/_index.md --
-- content/posts/a/b/c/d.md --
---
title: "D"
slug: "d"
---
D
-- layouts/single.html --
RelPermalink: {{ .RelPermalink }}
`
	b := hugolib.Test(t, files)
	// sections = [posts a b c]: index 1 is "a", last is "c".
	b.AssertFileContent("public/a/c/d/index.html", "RelPermalink: /a/c/d/")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/taxonomy_integration_test.go | resources/page/taxonomy_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_test
import (
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestTaxonomiesGetAndCount exercises Taxonomy.Count and Taxonomy.Get for a
// custom "authors" taxonomy and asserts the page counts per term.
func TestTaxonomiesGetAndCount(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ['rss','sitemap']
[taxonomies]
author = 'authors'
-- layouts/home.html --
John Smith count: {{ site.Taxonomies.authors.Count "John Smith" }}
Robert Jones count: {{ (site.Taxonomies.authors.Get "Robert Jones").Pages.Len }}
-- layouts/single.html --
{{ .Title }}|
-- layouts/list.html --
{{ .Title }}|
-- content/p1.md --
---
title: p1
authors: [John Smith,Robert Jones]
---
-- content/p2.md --
---
title: p2
authors: [John Smith]
---
`
	b := hugolib.Test(t, files)
	// John Smith appears on p1 and p2; Robert Jones only on p1.
	b.AssertFileContent("public/index.html",
		"John Smith count: 2",
		"Robert Jones count: 1",
	)
}
// TestTaxonomiesPage asserts that Taxonomy.Page returns the taxonomy's list
// page when the taxonomy has entries (tags) and nothing when it is unused
// (categories).
func TestTaxonomiesPage(t *testing.T) {
	t.Parallel()
	files := `
-- hugo.toml --
disableKinds = ['rss','section','sitemap']
[taxonomies]
tag = 'tags'
category = 'categories'
-- content/p1.md --
---
title: p1
tags: [tag-a]
---
-- layouts/list.html --
{{- with site.Taxonomies.tags.Page }}{{ .RelPermalink }}{{ end }}|
{{- with site.Taxonomies.categories.Page }}{{ .RelPermalink }}{{ end }}|
-- layouts/single.html --
{{ .Title }}
`
	b := hugolib.Test(t, files)
	// tags has a page (/tags/); categories is empty.
	b.AssertFileContent("public/index.html", "/tags/||")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pages_test.go | resources/page/pages_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestProbablyEq checks the ProbablyEq implementations for Pages, PageGroup
// and PagesGroup: equality is order-sensitive and also compares group keys.
func TestProbablyEq(t *testing.T) {
	p1, p2, p3 := &testPage{title: "p1"}, &testPage{title: "p2"}, &testPage{title: "p3"}
	pages12 := Pages{p1, p2}
	pages21 := Pages{p2, p1}
	pages123 := Pages{p1, p2, p3}
	t.Run("Pages", func(t *testing.T) {
		c := qt.New(t)
		c.Assert(pages12.ProbablyEq(pages12), qt.Equals, true)
		c.Assert(pages123.ProbablyEq(pages12), qt.Equals, false)
		// Same elements, different order: not equal.
		c.Assert(pages12.ProbablyEq(pages21), qt.Equals, false)
	})
	t.Run("PageGroup", func(t *testing.T) {
		c := qt.New(t)
		c.Assert(PageGroup{Key: "a", Pages: pages12}.ProbablyEq(PageGroup{Key: "a", Pages: pages12}), qt.Equals, true)
		// Same pages, different key: not equal.
		c.Assert(PageGroup{Key: "a", Pages: pages12}.ProbablyEq(PageGroup{Key: "b", Pages: pages12}), qt.Equals, false)
	})
	t.Run("PagesGroup", func(t *testing.T) {
		c := qt.New(t)
		pg1, pg2 := PageGroup{Key: "a", Pages: pages12}, PageGroup{Key: "b", Pages: pages123}
		c.Assert(PagesGroup{pg1, pg2}.ProbablyEq(PagesGroup{pg1, pg2}), qt.Equals, true)
		c.Assert(PagesGroup{pg1, pg2}.ProbablyEq(PagesGroup{pg2, pg1}), qt.Equals, false)
	})
}
// TestToPages checks the ToPages conversions: nil, Pages, []Page and []any
// all succeed; a non-page value returns an error.
func TestToPages(t *testing.T) {
	c := qt.New(t)
	p1, p2 := &testPage{title: "p1"}, &testPage{title: "p2"}
	pages12 := Pages{p1, p2}
	// mustToPages converts and fails the test on error.
	mustToPages := func(in any) Pages {
		p, err := ToPages(in)
		c.Assert(err, qt.IsNil)
		return p
	}
	c.Assert(mustToPages(nil), eq, Pages{})
	c.Assert(mustToPages(pages12), eq, pages12)
	c.Assert(mustToPages([]Page{p1, p2}), eq, pages12)
	c.Assert(mustToPages([]any{p1, p2}), eq, pages12)
	_, err := ToPages("not a page")
	c.Assert(err, qt.Not(qt.IsNil))
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagegroup_test.go | resources/page/pagegroup_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"context"
"github.com/google/go-cmp/cmp"
"strings"
"testing"
qt "github.com/frankban/quicktest"
"github.com/spf13/cast"
)
// pageGroupTestObject describes one synthetic page used by the grouping tests.
type pageGroupTestObject struct {
	path   string
	weight int
	date   string
	param  string
}

var pageGroupTestSources = []pageGroupTestObject{
	{"/section1/testpage1.md", 3, "2012-04-06", "foo"},
	{"/section1/testpage2.md", 3, "2012-01-01", "bar"},
	{"/section1/testpage3.md", 2, "2012-04-06", "foo"},
	{"/section2/testpage4.md", 1, "2012-03-02", "bar"},
	// date might also be a full datetime:
	{"/section2/testpage5.md", 1, "2012-04-06T00:00:00Z", "baz"},
}

// preparePageGroupTestPages builds test Pages from pageGroupTestSources.
// date, pubDate and expiryDate are all set from the source date; lastMod is
// the date plus three years. The same values are exposed as flat params and
// nested under the "custom_object" param.
func preparePageGroupTestPages(t *testing.T) Pages {
	var pages Pages
	for _, src := range pageGroupTestSources {
		p := newTestPage()
		p.path = src.path
		if p.path != "" {
			// Section is the first path segment.
			p.section = strings.Split(strings.TrimPrefix(p.path, "/"), "/")[0]
		}
		p.weight = src.weight
		p.date = cast.ToTime(src.date)
		p.pubDate = cast.ToTime(src.date)
		p.expiryDate = cast.ToTime(src.date)
		p.lastMod = cast.ToTime(src.date).AddDate(3, 0, 0)
		p.params["custom_param"] = src.param
		p.params["custom_date"] = cast.ToTime(src.date)
		p.params["custom_string_date"] = src.date
		p.params["custom_object"] = map[string]any{
			"param":       src.param,
			"date":        cast.ToTime(src.date),
			"string_date": src.date,
		}
		pages = append(pages, p)
	}
	return pages
}

// comparePageGroup compares page groups using page identity for the pages.
var comparePageGroup = qt.CmpEquals(cmp.Comparer(func(a, b Page) bool {
	return a == b
}))
// TestGroupByWithFieldNameArg groups by the Weight struct field (ascending key order).
func TestGroupByWithFieldNameArg(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: 1, Pages: Pages{pages[3], pages[4]}},
		{Key: 2, Pages: Pages{pages[2]}},
		{Key: 3, Pages: Pages{pages[0], pages[1]}},
	}
	groups, err := pages.GroupBy(context.Background(), "Weight")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByWithMethodNameArg groups by the result of the Type method.
func TestGroupByWithMethodNameArg(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
	}
	groups, err := pages.GroupBy(context.Background(), "Type")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByWithSectionArg groups by the Section method.
func TestGroupByWithSectionArg(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "section1", Pages: Pages{pages[0], pages[1], pages[2]}},
		{Key: "section2", Pages: Pages{pages[3], pages[4]}},
	}
	groups, err := pages.GroupBy(context.Background(), "Section")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByInReverseOrder groups by Weight with an explicit "desc" direction.
func TestGroupByInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: 3, Pages: Pages{pages[0], pages[1]}},
		{Key: 2, Pages: Pages{pages[2]}},
		{Key: 1, Pages: Pages{pages[3], pages[4]}},
	}
	groups, err := pages.GroupBy(context.Background(), "Weight", "desc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByCalledWithEmptyPages asserts that grouping nil Pages yields nil and no error.
func TestGroupByCalledWithEmptyPages(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupBy(context.Background(), "Weight")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, qt.IsNil)
}

// TestReverse asserts that Reverse on a default grouping equals a "desc" grouping.
func TestReverse(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	groups1, err := pages.GroupBy(context.Background(), "Weight", "desc")
	c.Assert(err, qt.IsNil)
	groups2, err := pages.GroupBy(context.Background(), "Weight")
	c.Assert(err, qt.IsNil)
	groups2 = groups2.Reverse()
	c.Assert(groups2, comparePageGroup, groups1)
}
// TestGroupByParam groups by the flat "custom_param" page param.
func TestGroupByParam(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "bar", Pages: Pages{pages[1], pages[3]}},
		{Key: "baz", Pages: Pages{pages[4]}},
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
	}
	groups, err := pages.GroupByParam("custom_param")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamInReverseOrder groups by param with the "desc" direction.
func TestGroupByParamInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
		{Key: "baz", Pages: Pages{pages[4]}},
		{Key: "bar", Pages: Pages{pages[1], pages[3]}},
	}
	groups, err := pages.GroupByParam("custom_param", "desc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamCalledWithCapitalLetterString asserts the param value's case is preserved in the key.
func TestGroupByParamCalledWithCapitalLetterString(t *testing.T) {
	c := qt.New(t)
	testStr := "TestString"
	p := newTestPage()
	p.params["custom_param"] = testStr
	pages := Pages{p}
	groups, err := pages.GroupByParam("custom_param")
	c.Assert(err, qt.IsNil)
	c.Assert(groups[0].Key, qt.DeepEquals, testStr)
}

// TestGroupByParamCalledWithSomeUnavailableParams asserts pages missing the param are excluded.
func TestGroupByParamCalledWithSomeUnavailableParams(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	delete(pages[1].Params(), "custom_param")
	delete(pages[3].Params(), "custom_param")
	delete(pages[4].Params(), "custom_param")
	expect := PagesGroup{
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
	}
	groups, err := pages.GroupByParam("custom_param")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamCalledWithEmptyPages asserts that nil Pages yields nil and no error.
func TestGroupByParamCalledWithEmptyPages(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByParam("custom_param")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, qt.IsNil)
}

// TestGroupByParamCalledWithUnavailableParam asserts an unknown param is not an error.
func TestGroupByParamCalledWithUnavailableParam(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	_, err := pages.GroupByParam("unavailable_param")
	c.Assert(err, qt.IsNil)
}

// TestGroupByParamNested groups by a dotted path into a nested param map.
func TestGroupByParamNested(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "bar", Pages: Pages{pages[1], pages[3]}},
		{Key: "baz", Pages: Pages{pages[4]}},
		{Key: "foo", Pages: Pages{pages[0], pages[2]}},
	}
	groups, err := pages.GroupByParam("custom_object.param")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}
// TestGroupByDate groups by month; the default key order is descending.
func TestGroupByDate(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByDate("2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByDateInReverseOrder groups by month with the "asc" direction.
func TestGroupByDateInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-01", Pages: Pages{pages[1]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}
	groups, err := pages.GroupByDate("2006-01", "asc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByPublishDate groups by publish date month (fixtures set pubDate == date).
func TestGroupByPublishDate(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByPublishDate("2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}
// TestGroupByPublishDateInReverseOrder groups by publish date month with the
// "asc" direction.
func TestGroupByPublishDateInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-01", Pages: Pages{pages[1]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}
	// Fix: this test previously called GroupByDate (copy-paste from
	// TestGroupByDateInReverseOrder), leaving the ascending
	// GroupByPublishDate path untested. The fixtures set pubDate == date,
	// so the expected groups are unchanged.
	groups, err := pages.GroupByPublishDate("2006-01", "asc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}
// TestGroupByPublishDateWithEmptyPages asserts that nil Pages yields nil and no error.
func TestGroupByPublishDateWithEmptyPages(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByPublishDate("2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, qt.IsNil)
}

// TestGroupByExpiryDate groups by expiry date month (fixtures set expiryDate == date).
func TestGroupByExpiryDate(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByExpiryDate("2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamDate groups by a time.Time-valued page param.
func TestGroupByParamDate(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByParamDate("custom_date", "2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamDateNested groups by a dotted path to a nested time.Time param.
func TestGroupByParamDateNested(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByParamDate("custom_object.date", "2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// https://github.com/gohugoio/hugo/issues/3983
// TestGroupByParamDateWithStringParams asserts string-valued date params are parsed and grouped.
func TestGroupByParamDateWithStringParams(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByParamDate("custom_string_date", "2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamDateNestedWithStringParams combines nested params with string dates.
func TestGroupByParamDateNestedWithStringParams(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByParamDate("custom_object.string_date", "2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}
// TestGroupByLastmod groups by last-modified month (fixtures set lastMod = date + 3 years).
func TestGroupByLastmod(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2015-04", Pages: Pages{pages[4], pages[2], pages[0]}},
		{Key: "2015-03", Pages: Pages{pages[3]}},
		{Key: "2015-01", Pages: Pages{pages[1]}},
	}
	groups, err := pages.GroupByLastmod("2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByLastmodInReverseOrder groups by last-modified month with the "asc" direction.
func TestGroupByLastmodInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2015-01", Pages: Pages{pages[1]}},
		{Key: "2015-03", Pages: Pages{pages[3]}},
		{Key: "2015-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}
	groups, err := pages.GroupByLastmod("2006-01", "asc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamDateInReverseOrder groups by a date param with the "asc" direction.
func TestGroupByParamDateInReverseOrder(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	pages := preparePageGroupTestPages(t)
	expect := PagesGroup{
		{Key: "2012-01", Pages: Pages{pages[1]}},
		{Key: "2012-03", Pages: Pages{pages[3]}},
		{Key: "2012-04", Pages: Pages{pages[0], pages[2], pages[4]}},
	}
	groups, err := pages.GroupByParamDate("custom_date", "2006-01", "asc")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, comparePageGroup, expect)
}

// TestGroupByParamDateWithEmptyPages asserts that nil Pages yields nil and no error.
func TestGroupByParamDateWithEmptyPages(t *testing.T) {
	c := qt.New(t)
	t.Parallel()
	var pages Pages
	groups, err := pages.GroupByParamDate("custom_date", "2006-01")
	c.Assert(err, qt.IsNil)
	c.Assert(groups, qt.IsNil)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_paths.go | resources/page/page_paths.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"bytes"
"path"
"path/filepath"
"strings"
"sync"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/urls"
"github.com/gohugoio/hugo/helpers"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/kinds"
)
const slash = "/"
// TargetPathDescriptor describes how a file path for a given resource
// should look like on the file system. The same descriptor is then later used to
// create both the permalinks and the relative links, paginator URLs etc.
//
// The big motivating behind this is to have only one source of truth for URLs,
// and by that also get rid of most of the fragile string parsing/encoding etc.
type TargetPathDescriptor struct {
	PathSpec *helpers.PathSpec

	// The output format this path is created for.
	Type output.Format

	// The page kind; compared against the kinds.Kind* constants below.
	Kind string

	// The content path and its section path.
	Path    *paths.Path
	Section *paths.Path

	// For regular content pages this is either
	// 1) the Slug, if set,
	// 2) the file base name (TranslationBaseName).
	BaseName string

	// Typically a language prefix added to file paths.
	PrefixFilePath string

	// Typically a language prefix added to links.
	PrefixLink string

	// If in multihost mode etc., every link/path needs to be prefixed, even
	// if set in URL.
	ForcePrefix bool

	// URL from front matter if set. Will override any Slug etc.
	URL string

	// Used to create paginator links.
	Addends string

	// The expanded permalink if defined for the section, ready to use.
	ExpandedPermalink string

	// Some types cannot have uglyURLs, even if globally enabled, RSS being one example.
	UglyURLs bool
}
// TODO(bep) move this type.
type TargetPaths struct {
	// Where to store the file on disk relative to the publish dir. OS slashes.
	TargetFilename string

	// The directory to write sub-resources of the above.
	SubResourceBaseTarget string

	// The base for creating links to sub-resources of the above.
	SubResourceBaseLink string

	// The relative permalink to this resource. Unix slashes.
	Link string
}
// RelPermalink returns the relative permalink, with the site's configured
// base path prepended (see PathSpec.PrependBasePath).
func (p TargetPaths) RelPermalink(s *helpers.PathSpec) string {
	return s.PrependBasePath(p.Link, false)
}
// PermalinkForOutputFormat returns the absolute permalink for this resource
// in the given output format. If the format defines its own protocol (e.g.
// webcal for calendars), the base URL is rewritten to use it; an empty
// string is returned if that rewrite fails.
func (p TargetPaths) PermalinkForOutputFormat(s *helpers.PathSpec, f output.Format) string {
	var (
		baseURL urls.BaseURL
		err     error
	)
	baseURL = s.Cfg.BaseURL()
	if f.Protocol != "" {
		baseURL, err = baseURL.WithProtocol(f.Protocol)
		if err != nil {
			return ""
		}
	}
	return s.PermalinkForBaseURL(p.Link, baseURL.String())
}
// CreateTargetPaths computes, from the given descriptor, both the target
// filename on disk and the relative link for a page/output-format pair.
// It is the single place where these paths are derived, so the file path
// and the permalink cannot drift apart.
func CreateTargetPaths(d TargetPathDescriptor) (tp TargetPaths) {
	// Normalize all file Windows paths to simplify what's next.
	if helpers.FilePathSeparator != "/" {
		d.PrefixFilePath = filepath.ToSlash(d.PrefixFilePath)
	}

	if !d.Type.Root && d.URL != "" && !strings.HasPrefix(d.URL, "/") {
		// Treat this as a context relative URL
		d.ForcePrefix = true
	}

	if d.URL != "" {
		d.URL = filepath.ToSlash(d.URL)
		if strings.Contains(d.URL, "..") {
			// Collapse ".." segments so the URL cannot escape the site root.
			d.URL = path.Join("/", d.URL)
		}
	}

	if d.Type.Root && !d.ForcePrefix {
		d.PrefixFilePath = ""
		d.PrefixLink = ""
	}

	// Builders are pooled; the defer returns this one with its state reset.
	pb := getPagePathBuilder(d)
	defer putPagePathBuilder(pb)

	pb.fullSuffix = d.Type.MediaType.FirstSuffix.FullSuffix

	// The top level index files, i.e. the home page etc., needs
	// the index base even when uglyURLs is enabled.
	needsBase := true

	pb.isUgly = (d.UglyURLs || d.Type.Ugly) && !d.Type.NoUgly
	pb.baseNameSameAsType = !d.Path.IsBundle() && d.BaseName != "" && d.BaseName == d.Type.BaseName

	indexIsUglyKind := d.Kind == kinds.KindHome || d.Kind == kinds.KindSection || d.Kind == kinds.KindTaxonomy
	indexIsUglyKind = indexIsUglyKind && pb.isUgly

	if d.ExpandedPermalink == "" && pb.baseNameSameAsType {
		pb.isUgly = true
	}

	if d.Type.Path != "" {
		pb.Add(d.Type.Path)
	}

	if d.Type == output.HTTPStatus404HTMLFormat || d.Type == output.SitemapFormat || d.Type == output.RobotsTxtFormat {
		// These formats are single files at fixed locations; they can never
		// have sub-resources.
		pb.noSubResources = true
	} else if d.Kind != kinds.KindPage && d.URL == "" && d.Section.Base() != "/" {
		if d.ExpandedPermalink != "" {
			pb.Add(d.ExpandedPermalink)
		} else {
			pb.Add(d.Section.Base())
		}
		needsBase = false
	}

	if d.Kind != kinds.KindHome && d.URL != "" {
		// URL set in front matter: it wins over slug/filename.
		pb.Add(paths.FieldsSlash(d.URL)...)

		if d.Addends != "" {
			pb.Add(d.Addends)
		}

		hasDot := strings.Contains(d.URL, ".")
		hasSlash := strings.HasSuffix(d.URL, "/")

		if hasSlash || !hasDot {
			pb.Add(d.Type.BaseName + pb.fullSuffix)
		} else if hasDot {
			// The URL carries its own extension; adopt it as the suffix.
			pb.fullSuffix = paths.Ext(d.URL)
		}

		if pb.IsHtmlIndex() {
			pb.linkUpperOffset = 1
		}

		if d.ForcePrefix {
			// Prepend language prefix if not already set in URL
			if d.PrefixFilePath != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixFilePath) {
				pb.prefixPath = d.PrefixFilePath
			}

			if d.PrefixLink != "" && !strings.HasPrefix(d.URL, "/"+d.PrefixLink) {
				pb.prefixLink = d.PrefixLink
			}
		}
	} else if !kinds.IsBranch(d.Kind) {
		// Regular content page without a front matter URL.
		if d.ExpandedPermalink != "" {
			pb.Add(d.ExpandedPermalink)
		} else {
			if dir := d.Path.ContainerDir(); dir != "" {
				pb.Add(dir)
			}
			if d.BaseName != "" {
				pb.Add(d.BaseName)
			} else {
				pb.Add(d.Path.BaseNameNoIdentifier())
			}
		}

		if d.Addends != "" {
			pb.Add(d.Addends)
		}

		if pb.isUgly {
			pb.ConcatLast(pb.fullSuffix)
		} else {
			pb.Add(d.Type.BaseName + pb.fullSuffix)
		}

		if pb.IsHtmlIndex() {
			pb.linkUpperOffset = 1
		}

		if d.PrefixFilePath != "" {
			pb.prefixPath = d.PrefixFilePath
		}

		if d.PrefixLink != "" {
			pb.prefixLink = d.PrefixLink
		}
	} else {
		// Branch pages (home, sections, taxonomies, terms).
		if d.Addends != "" {
			pb.Add(d.Addends)
		}
		needsBase = needsBase && d.Addends == ""

		if needsBase || (!pb.isUgly || indexIsUglyKind) {
			pb.Add(d.Type.BaseName + pb.fullSuffix)
		} else {
			pb.ConcatLast(pb.fullSuffix)
		}

		if !indexIsUglyKind && pb.IsHtmlIndex() {
			pb.linkUpperOffset = 1
		}

		if d.PrefixFilePath != "" {
			pb.prefixPath = d.PrefixFilePath
		}

		if d.PrefixLink != "" {
			pb.prefixLink = d.PrefixLink
		}
	}

	// if page URL is explicitly set in frontmatter,
	// preserve its value without sanitization
	if d.URL == "" {
		// Note: MakePathSanitized will lower case the path if
		// disablePathToLower isn't set.
		pb.Sanitize()
	}

	link := pb.Link()
	pagePath := pb.PathFile()

	tp.TargetFilename = filepath.FromSlash(pagePath)
	if !pb.noSubResources {
		tp.SubResourceBaseTarget = pb.PathDir()
		tp.SubResourceBaseLink = pb.LinkDir()
	}

	// paths.{URL,Path}Escape rely on url.Parse which
	// will consider # a fragment identifier, so it and
	// and everything after it will be stripped from
	// `link`, so we need to escape it first.
	link = strings.ReplaceAll(link, "#", "%23")

	if d.URL != "" {
		tp.Link = paths.URLEscape(link)
	} else {
		// This is slightly faster for when we know we don't have any
		// query or scheme etc.
		tp.Link = paths.PathEscape(link)
	}
	if tp.Link == "" {
		tp.Link = "/"
	}

	return
}
// When adding state here, remember to update putPagePathBuilder.
// pagePathBuilder accumulates path elements and derives file paths and
// links from them. Instances are reused via pagePathBuilderPool.
type pagePathBuilder struct {
	// The accumulated path elements, joined with "/" by Path.
	els []string

	// Scratch buffer reused by Path.
	b bytes.Buffer

	// The descriptor this builder was created for.
	d TargetPathDescriptor

	// Builder state.
	isUgly             bool
	baseNameSameAsType bool
	noSubResources     bool
	fullSuffix         string // File suffix including any ".".
	prefixLink         string
	prefixPath         string
	// Number of trailing elements to drop when building the link (1 when
	// the last element is index.html).
	linkUpperOffset int
}
// Add appends the given path elements to the builder, skipping any element
// that is empty or a bare slash.
func (p *pagePathBuilder) Add(el ...string) {
	for _, e := range el {
		if e == "" || e == slash {
			continue
		}
		p.els = append(p.els, e)
	}
}
// ConcatLast concatenates s onto the last element, dropping a trailing
// slash on that element first. With no elements present, s is simply added.
func (p *pagePathBuilder) ConcatLast(s string) {
	n := len(p.els)
	if n == 0 {
		p.Add(s)
		return
	}
	last := p.els[n-1]
	switch {
	case last == "":
		p.els[n-1] = s
	case strings.HasSuffix(last, "/"):
		p.els[n-1] = last[:len(last)-1] + s
	default:
		p.els[n-1] = last + s
	}
}
// IsHtmlIndex reports whether the last added element is "index.html".
func (p *pagePathBuilder) IsHtmlIndex() bool {
	return p.Last() == "index.html"
}
// Last returns the last added element, or "" if none has been added.
// The guard must be on length, not on nil: builders recycled through
// pagePathBuilderPool keep a non-nil, zero-length els slice (see
// putPagePathBuilder's els[:0]), and indexing els[len(els)-1] on such a
// builder would panic.
func (p *pagePathBuilder) Last() string {
	if len(p.els) == 0 {
		return ""
	}
	return p.els[len(p.els)-1]
}
// Link builds the relative link, dropping linkUpperOffset trailing elements
// (so /foo/index.html links as /foo/), trimming a base name that matches
// the output format's, and prepending any link prefix.
func (p *pagePathBuilder) Link() string {
	link := p.Path(p.linkUpperOffset)

	if p.baseNameSameAsType {
		link = strings.TrimSuffix(link, p.d.BaseName)
	}

	if p.prefixLink != "" {
		link = "/" + p.prefixLink + link
	}

	// Directory-style links always end with a slash.
	if p.linkUpperOffset > 0 && !strings.HasSuffix(link, "/") {
		link += "/"
	}

	return link
}
// LinkDir returns the base directory used when linking to sub-resources,
// or "" when this output cannot have sub-resources.
func (p *pagePathBuilder) LinkDir() string {
	if p.noSubResources {
		return ""
	}
	dir := p.PathDirBase()
	if p.prefixLink == "" {
		return dir
	}
	return "/" + p.prefixLink + dir
}
// Path joins the accumulated elements into a slash-separated path,
// omitting the last upperOffset elements. A separating "/" is inserted
// between elements unless the previous element already ends with one or
// the next already starts with one.
func (p *pagePathBuilder) Path(upperOffset int) string {
	upper := len(p.els)
	if upperOffset > 0 {
		upper -= upperOffset
	}
	// Reuse the builder's scratch buffer; the returned string is a copy.
	p.b.Reset()
	var hadTrailingSlash bool
	for _, el := range p.els[:upper] {
		if !hadTrailingSlash && !strings.HasPrefix(el, "/") {
			p.b.WriteByte('/')
		}
		hadTrailingSlash = strings.HasSuffix(el, "/")
		p.b.WriteString(el)
	}
	return p.b.String()
}
// PathDir returns the directory for sub-resource files, with any file path
// prefix prepended.
func (p *pagePathBuilder) PathDir() string {
	base := p.PathDirBase()
	if p.prefixPath == "" {
		return base
	}
	return "/" + p.prefixPath + base
}
// PathDirBase derives the sub-resource base directory from the full path:
// for index-style outputs it is the containing directory, otherwise the
// path with the file suffix stripped. Returns "" when sub-resources are
// not possible for this output, or when the result would be the root.
func (p *pagePathBuilder) PathDirBase() string {
	if p.noSubResources {
		return ""
	}

	dir := p.Path(0)
	isIndex := strings.HasPrefix(p.Last(), p.d.Type.BaseName+".")

	if isIndex {
		dir = paths.Dir(dir)
	} else {
		dir = strings.TrimSuffix(dir, p.fullSuffix)
	}

	if dir == "/" {
		dir = ""
	}

	return dir
}
// PathFile returns the full slash-separated file path, with any file path
// prefix prepended.
func (p *pagePathBuilder) PathFile() string {
	full := p.Path(0)
	if p.prefixPath == "" {
		return full
	}
	return "/" + p.prefixPath + full
}
// Prepend inserts the given elements at the front of the element list,
// reusing p.els' backing array where capacity allows.
func (p *pagePathBuilder) Prepend(el ...string) {
	p.els = append(p.els[:0], append(el, p.els[0:]...)...)
}
// Sanitize runs every accumulated element through MakePathSanitized
// (which may lower-case, depending on site configuration).
func (p *pagePathBuilder) Sanitize() {
	for i := range p.els {
		p.els[i] = p.d.PathSpec.MakePathSanitized(p.els[i])
	}
}
// pagePathBuilderPool recycles pagePathBuilder instances; CreateTargetPaths
// is called for every page/output-format pair, so pooling avoids repeated
// allocations of the builder and its buffers.
var pagePathBuilderPool = &sync.Pool{
	New: func() any {
		return &pagePathBuilder{}
	},
}
// getPagePathBuilder fetches a builder from the pool and binds it to d.
// All other state was reset by putPagePathBuilder when it was returned.
func getPagePathBuilder(d TargetPathDescriptor) *pagePathBuilder {
	b := pagePathBuilderPool.Get().(*pagePathBuilder)
	b.d = d
	return b
}
// putPagePathBuilder resets the builder's state and returns it to the pool.
// b.d is not cleared here; it is overwritten by getPagePathBuilder on the
// next Get. Note that els keeps its backing array (els[:0]), so a recycled
// builder has a non-nil, zero-length slice.
func putPagePathBuilder(b *pagePathBuilder) {
	b.els = b.els[:0]
	b.fullSuffix = ""
	b.baseNameSameAsType = false
	b.isUgly = false
	b.noSubResources = false
	b.prefixLink = ""
	b.prefixPath = ""
	b.linkUpperOffset = 0
	pagePathBuilderPool.Put(b)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/taxonomy.go | resources/page/taxonomy.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"fmt"
"sort"
"strings"
"github.com/gohugoio/hugo/compare"
"github.com/gohugoio/hugo/langs"
)
// The TaxonomyList is a list of all taxonomies and their values
// e.g. List['tags'] => TagTaxonomy (from above)
type TaxonomyList map[string]Taxonomy
// String returns a short debug representation with the number of taxonomies.
func (tl TaxonomyList) String() string {
	return fmt.Sprintf("TaxonomyList(%d)", len(tl))
}
// A Taxonomy is a map of keywords to a list of pages.
// For example
//
// TagTaxonomy['technology'] = WeightedPages
// TagTaxonomy['go'] = WeightedPages
type Taxonomy map[string]WeightedPages
// OrderedTaxonomy is another representation of an Taxonomy using an array rather than a map.
// Important because you can't order a map.
type OrderedTaxonomy []OrderedTaxonomyEntry
// getOneOPage returns one page in the taxonomy,
// nil if there is none.
// getOneOPage returns one page in the taxonomy,
// nil if there is none.
func (t OrderedTaxonomy) getOneOPage() Page {
	if len(t) == 0 {
		return nil
	}
	// Guard against an entry with no weighted pages; indexing [0] on an
	// empty Pages slice would panic.
	pages := t[0].Pages()
	if len(pages) == 0 {
		return nil
	}
	return pages[0]
}
// OrderedTaxonomyEntry is similar to an element of a Taxonomy, but with the key embedded (as name)
// e.g: {Name: Technology, WeightedPages: TaxonomyPages}
type OrderedTaxonomyEntry struct {
Name string
WeightedPages
}
// Get the weighted pages for the given key.
// Get returns the weighted pages for the given key. The lookup lower-cases
// the key, so map keys are expected to be stored lower-cased.
func (i Taxonomy) Get(key string) WeightedPages {
	return i[strings.ToLower(key)]
}
// Count returns the number of weighted pages for the given key
// (case-insensitive, like Get).
func (i Taxonomy) Count(key string) int { return len(i[strings.ToLower(key)]) }
// TaxonomyArray returns an ordered taxonomy with a non defined order.
// TaxonomyArray returns an ordered taxonomy with a non defined order
// (map iteration order is random).
func (i Taxonomy) TaxonomyArray() OrderedTaxonomy {
	entries := make(OrderedTaxonomy, 0, len(i))
	for name, wp := range i {
		entries = append(entries, OrderedTaxonomyEntry{Name: name, WeightedPages: wp})
	}
	return entries
}
// Alphabetical returns an ordered taxonomy sorted by key name.
// Alphabetical returns an ordered taxonomy sorted by key name, using a
// language-aware collator for the current site's language.
func (i Taxonomy) Alphabetical() OrderedTaxonomy {
	ia := i.TaxonomyArray()
	// We need a page to resolve the site language; with no pages there is
	// nothing to sort anyway.
	p := ia.getOneOPage()
	if p == nil {
		return ia
	}
	currentSite := p.Site().Current()
	coll := langs.GetCollator1(currentSite.Language())
	// The collator appears to be shared per language, so hold its lock for
	// the duration of the sort.
	coll.Lock()
	defer coll.Unlock()
	name := func(i1, i2 *OrderedTaxonomyEntry) bool {
		return coll.CompareStrings(i1.Name, i2.Name) < 0
	}
	oiBy(name).Sort(ia)
	return ia
}
// ByCount returns an ordered taxonomy sorted by # of pages per key.
// If taxonomies have the same # of pages, sort them alphabetical
// ByCount returns an ordered taxonomy sorted by # of pages per key,
// descending. Keys with the same number of pages are ordered
// alphabetically.
func (i Taxonomy) ByCount() OrderedTaxonomy {
	ia := i.TaxonomyArray()
	byCount := func(a, b *OrderedTaxonomyEntry) bool {
		na, nb := len(a.WeightedPages), len(b.WeightedPages)
		if na != nb {
			return na > nb
		}
		return compare.LessStrings(a.Name, b.Name)
	}
	oiBy(byCount).Sort(ia)
	return ia
}
// Page returns the taxonomy page or nil if the taxonomy has no terms.
// Page returns the taxonomy page or nil if the taxonomy has no terms.
// Map iteration picks an arbitrary term, which works on the assumption
// that every term page shares the same taxonomy Parent.
func (i Taxonomy) Page() Page {
	for _, v := range i {
		return v.Page().Parent()
	}
	return nil
}
// Pages returns the Pages for this taxonomy.
// Pages returns the Pages for this taxonomy entry, unwrapped from their
// weights.
func (ie OrderedTaxonomyEntry) Pages() Pages {
	return ie.WeightedPages.Pages()
}
// Count returns the number of pages in this taxonomy entry.
func (ie OrderedTaxonomyEntry) Count() int {
	return len(ie.WeightedPages)
}
// Term returns the name given to this taxonomy.
// Term returns the name given to this taxonomy entry.
func (ie OrderedTaxonomyEntry) Term() string {
	return ie.Name
}
// Reverse reverses the order of the entries in this taxonomy.
// Reverse reverses the entries in place and returns the same slice for
// chaining.
func (t OrderedTaxonomy) Reverse() OrderedTaxonomy {
	for left, right := 0, len(t)-1; left < right; left, right = left+1, right-1 {
		t[left], t[right] = t[right], t[left]
	}
	return t
}
// A type to implement the sort interface for TaxonomyEntries.
// orderedTaxonomySorter pairs a taxonomy with the comparison closure that
// orders it; it implements sort.Interface.
type orderedTaxonomySorter struct {
	taxonomy OrderedTaxonomy
	by       oiBy
}
// Closure used in the Sort.Less method.
// oiBy is a "less" closure comparing two taxonomy entries.
type oiBy func(i1, i2 *OrderedTaxonomyEntry) bool

// Sort stably sorts the taxonomy using the closure as the ordering.
func (by oiBy) Sort(taxonomy OrderedTaxonomy) {
	ps := &orderedTaxonomySorter{
		taxonomy: taxonomy,
		by:       by, // The Sort method's receiver is the function (closure) that defines the sort order.
	}
	sort.Stable(ps)
}
// Len is part of sort.Interface.
func (s *orderedTaxonomySorter) Len() int {
	return len(s.taxonomy)
}

// Swap is part of sort.Interface.
func (s *orderedTaxonomySorter) Swap(i, j int) {
	s.taxonomy[i], s.taxonomy[j] = s.taxonomy[j], s.taxonomy[i]
}

// Less is part of sort.Interface. It is implemented by calling the "by" closure in the sorter.
func (s *orderedTaxonomySorter) Less(i, j int) bool {
	return s.by(&s.taxonomy[i], &s.taxonomy[j])
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_kinds.go | resources/page/page_kinds.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_paths_test.go | resources/page/page_paths_test.go | // Copyright 2025 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestPagePathsBuilder verifies that added elements are joined with
// leading slashes by Path.
func TestPagePathsBuilder(t *testing.T) {
	c := qt.New(t)
	pb := getPagePathBuilder(TargetPathDescriptor{})
	defer putPagePathBuilder(pb)
	pb.Add("foo", "bar")
	c.Assert(pb.Path(0), qt.Equals, "/foo/bar")
}
// BenchmarkPagePathsBuilderPath measures Path on a small, fixed element
// set; the builder is reused across iterations.
func BenchmarkPagePathsBuilderPath(b *testing.B) {
	d := TargetPathDescriptor{}
	pb := getPagePathBuilder(d)
	defer putPagePathBuilder(pb)
	pb.Add("foo", "bar")
	for b.Loop() {
		_ = pb.Path(0)
	}
}
// BenchmarkPagePathsBuilderPathDir measures PathDir, including the prefix
// concatenation path (prefixPath is set).
func BenchmarkPagePathsBuilderPathDir(b *testing.B) {
	d := TargetPathDescriptor{}
	pb := getPagePathBuilder(d)
	defer putPagePathBuilder(pb)
	pb.Add("foo", "bar")
	pb.prefixPath = "foo/"
	for b.Loop() {
		_ = pb.PathDir()
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_wrappers.autogen.go | resources/page/page_wrappers.autogen.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file is autogenerated.
package page
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/page_generate/generate_page_wrappers.go | resources/page/page_generate/generate_page_wrappers.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package page_generate
import (
"errors"
"fmt"
"os"
"path/filepath"
"reflect"
"github.com/gohugoio/hugo/codegen"
"github.com/gohugoio/hugo/resources/page"
)
const header = `// Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// This file is autogenerated.
`
var (
	// pageInterface is the interface whose methods drive the code
	// generation below.
	pageInterface = reflect.TypeOf((*page.PageMetaProvider)(nil)).Elem()

	// packageDir is where the generated file is written, relative to the
	// project root.
	packageDir = filepath.FromSlash("resources/page")
)
// Generate produces the autogenerated source files for the page package.
func Generate(c *codegen.Inspector) error {
	err := generateMarshalJSON(c)
	if err != nil {
		return fmt.Errorf("failed to generate JSON marshaler: %w", err)
	}
	return nil
}
// generateMarshalJSON writes page_marshaljson.autogen.go, a MarshalJSON
// implementation for Page derived from the methods of PageMetaProvider.
// Write and close errors are now reported instead of being silently
// dropped, so a short write cannot leave a truncated generated file
// undetected.
func generateMarshalJSON(c *codegen.Inspector) error {
	filename := filepath.Join(c.ProjectRootDir, packageDir, "page_marshaljson.autogen.go")
	f, err := os.Create(filename)
	if err != nil {
		return err
	}
	// Safety net for the early-return paths below; the successful path
	// closes explicitly so the error is surfaced (double Close is benign).
	defer f.Close()

	includes := []reflect.Type{pageInterface}
	excludes := []reflect.Type{}

	methods := c.MethodsFromTypes(
		includes,
		excludes)

	if len(methods) == 0 {
		return errors.New("no methods found")
	}

	marshalJSON, pkgImports := methods.ToMarshalJSON(
		"Page",
		"github.com/gohugoio/hugo/resources/page",
		// Exclusion regexps. Matches method names.
		`\bPage\b`,
	)

	if _, err := fmt.Fprintf(f, `%s
package page
%s
%s
`, header, importsString(pkgImports), marshalJSON); err != nil {
		return err
	}

	return f.Close()
}
// importsString renders the given import paths as Go source: the empty
// string for no imports, a single-line import for one path, and a
// parenthesized import block otherwise.
func importsString(imps []string) string {
	switch len(imps) {
	case 0:
		return ""
	case 1:
		return fmt.Sprintf("import %q", imps[0])
	}
	out := "import (\n"
	for _, imp := range imps {
		out += fmt.Sprintf("%q\n", imp)
	}
	return out + ")"
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagemeta/pagemeta.go | resources/page/pagemeta/pagemeta.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagemeta
import (
"github.com/mitchellh/mapstructure"
)
// Valid values for BuildConfig.List (never/always/local) and
// BuildConfig.Render (never/always/link).
const (
	Never       = "never"
	Always      = "always"
	ListLocally = "local"
	Link        = "link"
)
// DefaultBuildConfig is the all-on configuration used when nothing is set
// in front matter: list everywhere, render, and publish resources.
var DefaultBuildConfig = BuildConfig{
	List:             Always,
	Render:           Always,
	PublishResources: true,
}
// BuildConfig holds configuration options about how to handle a Page in Hugo's
// build process.
type BuildConfig struct {
	// Whether to add it to any of the page collections.
	// Note that the page can always be found with .Site.GetPage.
	// Valid values: never, always, local.
	// Setting it to 'local' means they will be available via the local
	// page collections, e.g. $section.Pages.
	// Note: before 0.57.2 this was a bool, so we accept those too.
	List string

	// Whether to render it.
	// Valid values: never, always, link.
	// The value link means it will not be rendered, but it will get a RelPermalink/Permalink.
	// Note that before 0.76.0 this was a bool, so we accept those too.
	Render string

	// Whether to publish its resources. These will still be published on demand,
	// but enabling this can be useful if the originals (e.g. images) are
	// never used.
	PublishResources bool
}
// Disable sets all options to their off value.
// Disable sets all options to their off value.
func (b *BuildConfig) Disable() {
	*b = BuildConfig{
		List:             Never,
		Render:           Never,
		PublishResources: false,
	}
}
// IsZero reports whether b is the zero BuildConfig (all fields unset).
func (b BuildConfig) IsZero() bool {
	return b == BuildConfig{}
}
// DecodeBuildConfig creates a BuildConfig from m, starting from
// DefaultBuildConfig. Historical bool values (List pre 0.67.1, Render pre
// 0.76.0) weak-decode to "0"/"1" and are mapped onto the string enums;
// anything unrecognized falls back to Always.
func DecodeBuildConfig(m any) (BuildConfig, error) {
	b := DefaultBuildConfig
	if m == nil {
		return b, nil
	}

	err := mapstructure.WeakDecode(m, &b)

	switch b.List {
	case Always, Never, ListLocally:
		// Already a valid enum value.
	case "0":
		b.List = Never
	case "1":
		b.List = Always
	default:
		b.List = Always
	}

	switch b.Render {
	case Always, Never, Link:
		// Already a valid enum value.
	case "0":
		b.Render = Never
	case "1":
		b.Render = Always
	default:
		b.Render = Always
	}

	return b, err
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagemeta/page_frontmatter_test.go | resources/page/pagemeta/page_frontmatter_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagemeta_test
import (
"strings"
"testing"
"time"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/page/pagemeta"
qt "github.com/frankban/quicktest"
)
// newTestFd returns a minimal FrontMatterDescriptor with empty front
// matter and params maps and a UTC location, ready for HandleDates tests.
func newTestFd() *pagemeta.FrontMatterDescriptor {
	return &pagemeta.FrontMatterDescriptor{
		PageConfigEarly: &pagemeta.PageConfigEarly{
			Frontmatter: make(map[string]any),
		},
		PageConfigLate: &pagemeta.PageConfigLate{
			Params: make(map[string]any),
		},
		Location: time.UTC,
	}
}
// TestFrontMatterNewConfig checks how the frontmatter date config is
// decoded: explicit lists are lower-cased, unset keys get the defaults,
// and the :default keyword expands to the default list in place.
func TestFrontMatterNewConfig(t *testing.T) {
	c := qt.New(t)

	cfg := config.New()

	// Mixed-case keys and values; decoding should normalize to lower case.
	cfg.Set("frontmatter", map[string]any{
		"date":        []string{"publishDate", "LastMod"},
		"Lastmod":     []string{"publishDate"},
		"expiryDate":  []string{"lastMod"},
		"publishDate": []string{"date"},
	})

	fc, err := pagemeta.DecodeFrontMatterConfig(cfg)
	c.Assert(err, qt.IsNil)
	// Aliases expand: e.g. publishDate -> publishdate, pubdate, published.
	c.Assert(fc.Date, qt.DeepEquals, []string{"publishdate", "pubdate", "published", "lastmod", "modified"})
	c.Assert(fc.Lastmod, qt.DeepEquals, []string{"publishdate", "pubdate", "published"})
	c.Assert(fc.ExpiryDate, qt.DeepEquals, []string{"lastmod", "modified"})
	c.Assert(fc.PublishDate, qt.DeepEquals, []string{"date"})

	// Default
	cfg = config.New()
	fc, err = pagemeta.DecodeFrontMatterConfig(cfg)
	c.Assert(err, qt.IsNil)
	c.Assert(fc.Date, qt.DeepEquals, []string{"date", "publishdate", "pubdate", "published", "lastmod", "modified"})
	c.Assert(fc.Lastmod, qt.DeepEquals, []string{":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"})
	c.Assert(fc.ExpiryDate, qt.DeepEquals, []string{"expirydate", "unpublishdate"})
	c.Assert(fc.PublishDate, qt.DeepEquals, []string{"publishdate", "pubdate", "published", "date"})

	// :default keyword
	cfg.Set("frontmatter", map[string]any{
		"date":        []string{"d1", ":default"},
		"lastmod":     []string{"d2", ":default"},
		"expiryDate":  []string{"d3", ":default"},
		"publishDate": []string{"d4", ":default"},
	})
	fc, err = pagemeta.DecodeFrontMatterConfig(cfg)
	c.Assert(err, qt.IsNil)
	// The custom key stays first; :default splices in the default list.
	c.Assert(fc.Date, qt.DeepEquals, []string{"d1", "date", "publishdate", "pubdate", "published", "lastmod", "modified"})
	c.Assert(fc.Lastmod, qt.DeepEquals, []string{"d2", ":git", "lastmod", "modified", "date", "publishdate", "pubdate", "published"})
	c.Assert(fc.ExpiryDate, qt.DeepEquals, []string{"d3", "expirydate", "unpublishdate"})
	c.Assert(fc.PublishDate, qt.DeepEquals, []string{"d4", "publishdate", "pubdate", "published", "date"})
}
// TestFrontMatterDatesHandlers checks the special :filename, :fileModTime
// and :git date handlers: when the handler's source (filename date, mod
// time, Git author date) is present, it wins over the front matter date;
// when absent, the front matter date is used.
func TestFrontMatterDatesHandlers(t *testing.T) {
	c := qt.New(t)

	for _, handlerID := range []string{":filename", ":fileModTime", ":git"} {

		cfg := config.New()

		cfg.Set("frontmatter", map[string]any{
			"date": []string{handlerID, "date"},
		})
		conf := testconfig.GetTestConfig(nil, cfg)
		handler, err := pagemeta.NewFrontmatterHandler(nil, conf.GetConfigSection("frontmatter").(pagemeta.FrontmatterConfig))
		c.Assert(err, qt.IsNil)

		d1, _ := time.Parse("2006-01-02", "2018-02-01")
		d2, _ := time.Parse("2006-01-02", "2018-02-02")

		d := newTestFd()
		// Feed d1 to the handler under test; d2 stays in front matter.
		switch strings.ToLower(handlerID) {
		case ":filename":
			d.BaseFilename = "2018-02-01-page.md"
		case ":filemodtime":
			d.ModTime = d1
		case ":git":
			d.GitAuthorDate = d1
		}
		d.PageConfigEarly.Frontmatter["date"] = d2
		c.Assert(handler.HandleDates(d), qt.IsNil)
		// The handler source wins; the raw front matter value is still
		// exposed as the "date" param.
		c.Assert(d.PageConfigLate.Dates.Date, qt.Equals, d1)
		c.Assert(d.PageConfigLate.Params["date"], qt.Equals, d2)

		// No handler source set: fall through to the front matter date.
		d = newTestFd()
		d.PageConfigEarly.Frontmatter["date"] = d2
		c.Assert(handler.HandleDates(d), qt.IsNil)
		c.Assert(d.PageConfigLate.Dates.Date, qt.Equals, d2)
		c.Assert(d.PageConfigLate.Params["date"], qt.Equals, d2)
	}
}
// TestFrontMatterDatesDefaultKeyword checks :default expansion order:
// a key listed before :default wins over the defaults, while :default
// listed first lets the default chain win over the custom key.
func TestFrontMatterDatesDefaultKeyword(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	cfg := config.New()

	cfg.Set("frontmatter", map[string]any{
		"date":        []string{"mydate", ":default"},
		"publishdate": []string{":default", "mypubdate"},
	})

	conf := testconfig.GetTestConfig(nil, cfg)
	handler, err := pagemeta.NewFrontmatterHandler(nil, conf.GetConfigSection("frontmatter").(pagemeta.FrontmatterConfig))
	c.Assert(err, qt.IsNil)

	testDate, _ := time.Parse("2006-01-02", "2018-02-01")
	d := newTestFd()
	// Distinct day offsets so the assertions below identify which key won.
	d.PageConfigEarly.Frontmatter["mydate"] = testDate
	d.PageConfigEarly.Frontmatter["date"] = testDate.Add(1 * 24 * time.Hour)
	d.PageConfigEarly.Frontmatter["mypubdate"] = testDate.Add(2 * 24 * time.Hour)
	d.PageConfigEarly.Frontmatter["publishdate"] = testDate.Add(3 * 24 * time.Hour)

	c.Assert(handler.HandleDates(d), qt.IsNil)
	c.Assert(d.PageConfigLate.Dates.Date.Day(), qt.Equals, 1)
	c.Assert(d.PageConfigLate.Dates.Lastmod.Day(), qt.Equals, 2)
	c.Assert(d.PageConfigLate.Dates.PublishDate.Day(), qt.Equals, 4)
	c.Assert(d.PageConfigLate.Dates.ExpiryDate.IsZero(), qt.Equals, true)
}
// TestContentMediaTypeFromMarkup checks that the various markup identifiers
// (including aliases such as mdown/goldmark/adoc) resolve to the expected
// content media type; an empty markup defaults to Markdown.
func TestContentMediaTypeFromMarkup(t *testing.T) {
	c := qt.New(t)
	logger := loggers.NewDefault()

	for _, test := range []struct {
		in       string
		expected string
	}{
		{"", "text/markdown"},
		{"md", "text/markdown"},
		{"markdown", "text/markdown"},
		{"mdown", "text/markdown"},
		{"goldmark", "text/markdown"},
		{"html", "text/html"},
		{"htm", "text/html"},
		{"asciidoc", "text/asciidoc"},
		{"asciidocext", "text/asciidoc"},
		{"adoc", "text/asciidoc"},
		{"pandoc", "text/pandoc"},
		{"pdc", "text/pandoc"},
		{"rst", "text/rst"},
	} {
		var pc pagemeta.PageConfigEarly
		pc.Content.Markup = test.in
		c.Assert(pc.CompileForPagesFromDataPre("", logger, media.DefaultTypes), qt.IsNil)
		c.Assert(pc.ContentMediaType.Type, qt.Equals, test.expected)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagemeta/pagemeta_test.go | resources/page/pagemeta/pagemeta_test.go | // Copyright 2020 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagemeta
import (
"fmt"
"testing"
"time"
"github.com/gohugoio/hugo/htesting/hqt"
"github.com/gohugoio/hugo/config"
qt "github.com/frankban/quicktest"
)
// TestDecodeBuildConfig checks DecodeBuildConfig against TOML input:
// legacy bool values map onto the string enums, valid enum strings pass
// through, and unknown values fall back to "always".
func TestDecodeBuildConfig(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	configTempl := `
[build]
render = %s
list = %s
publishResources = true`

	for _, test := range []struct {
		args   []any
		expect BuildConfig
	}{
		// Legacy bools.
		{
			[]any{"true", "true"},
			BuildConfig{
				Render:           Always,
				List:             Always,
				PublishResources: true,
			},
		},
		{[]any{"true", "false"}, BuildConfig{
			Render:           Always,
			List:             Never,
			PublishResources: true,
		}},
		// Enum strings.
		{[]any{`"always"`, `"always"`}, BuildConfig{
			Render:           Always,
			List:             Always,
			PublishResources: true,
		}},
		{[]any{`"never"`, `"never"`}, BuildConfig{
			Render:           Never,
			List:             Never,
			PublishResources: true,
		}},
		{[]any{`"link"`, `"local"`}, BuildConfig{
			Render:           Link,
			List:             ListLocally,
			PublishResources: true,
		}},
		// Garbage falls back to Always.
		{[]any{`"always"`, `"asdfadf"`}, BuildConfig{
			Render:           Always,
			List:             Always,
			PublishResources: true,
		}},
	} {
		cfg, err := config.FromConfigString(fmt.Sprintf(configTempl, test.args...), "toml")
		c.Assert(err, qt.IsNil)
		bcfg, err := DecodeBuildConfig(cfg.Get("build"))
		c.Assert(err, qt.IsNil)

		eq := qt.CmpEquals(hqt.DeepAllowUnexported(BuildConfig{}))

		c.Assert(bcfg, eq, test.expect)
	}
}
// TestDateAndSlugFromBaseFilename checks extraction of a leading date
// (and optional time) plus slug from content filenames, in a fixed
// non-UTC time zone.
func TestDateAndSlugFromBaseFilename(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	tests := []struct {
		name string
		date string
		slug string
	}{
		// date
		{"2025-07-04 page.md", "2025-07-04T00:00:00+02:00", "page"},
		{"2025-07-04-page.md", "2025-07-04T00:00:00+02:00", "page"},
		{"2025-07-04_page.md", "2025-07-04T00:00:00+02:00", "page"},
		{"2025-07-04page.md", "2025-07-04T00:00:00+02:00", "page"},
		{"2025-07-04", "2025-07-04T00:00:00+02:00", ""},
		{"2025-07-04-.md", "2025-07-04T00:00:00+02:00", ""},
		{"2025-07-04.md", "2025-07-04T00:00:00+02:00", ""},
		// date and time
		{"2025-07-04-22-17-13 page.md", "2025-07-04T22:17:13+02:00", "page"},
		{"2025-07-04-22-17-13-page.md", "2025-07-04T22:17:13+02:00", "page"},
		{"2025-07-04-22-17-13_page.md", "2025-07-04T22:17:13+02:00", "page"},
		{"2025-07-04-22-17-13page.md", "2025-07-04T22:17:13+02:00", "page"},
		{"2025-07-04-22-17-13", "2025-07-04T22:17:13+02:00", ""},
		{"2025-07-04-22-17-13-.md", "2025-07-04T22:17:13+02:00", ""},
		{"2025-07-04-22-17-13.md", "2025-07-04T22:17:13+02:00", ""},
		// date and time with other separators between the two
		{"2025-07-04T22-17-13.md", "2025-07-04T22:17:13+02:00", ""},
		{"2025-07-04 22-17-13.md", "2025-07-04T22:17:13+02:00", ""},
		// no date or time
		{"something.md", "0001-01-01T00:00:00+00:00", ""},                  // 9 chars
		{"some-thing-.md", "0001-01-01T00:00:00+00:00", ""},                // 10 chars
		{"somethingsomething.md", "0001-01-01T00:00:00+00:00", ""},         // 18 chars
		{"something-something.md", "0001-01-01T00:00:00+00:00", ""},        // 19 chars
		{"something-something-else.md", "0001-01-01T00:00:00+00:00", ""},   // 27 chars
		// invalid
		{"2025-07-4-page.md", "0001-01-01T00:00:00+00:00", ""},
		{"2025-07-4-22-17-13-page.md", "0001-01-01T00:00:00+00:00", ""},
		{"asdfasdf.md", "0001-01-01T00:00:00+00:00", ""},
	}

	location, err := time.LoadLocation("Europe/Oslo")
	if err != nil {
		// Must be fatal: continuing with a nil *time.Location would make
		// every assertion below fail (or panic) for the wrong reason.
		t.Fatalf("Unable to determine location from given time zone: %v", err)
	}

	for _, test := range tests {
		gotDate, gotSlug := dateAndSlugFromBaseFilename(location, test.name)
		c.Assert(gotDate.Format("2006-01-02T15:04:05-07:00"), qt.Equals, test.date)
		c.Assert(gotSlug, qt.Equals, test.slug)
	}
}
// TestExpandDefaultValues checks that every ":default" placeholder is replaced
// in place by the default values, and that lists without the placeholder are
// returned unchanged.
func TestExpandDefaultValues(t *testing.T) {
	c := qt.New(t)
	c.Assert(expandDefaultValues([]string{"a", ":default", "d"}, []string{"b", "c"}), qt.DeepEquals, []string{"a", "b", "c", "d"})
	c.Assert(expandDefaultValues([]string{"a", "b", "c"}, []string{"a", "b", "c"}), qt.DeepEquals, []string{"a", "b", "c"})
	c.Assert(expandDefaultValues([]string{":default", "a", ":default", "d"}, []string{"b", "c"}), qt.DeepEquals, []string{"b", "c", "a", "b", "c", "d"})
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagemeta/page_frontmatter.go | resources/page/pagemeta/page_frontmatter.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagemeta
import (
"errors"
"fmt"
"path"
"strings"
"time"
"github.com/gohugoio/hugo/common/hreflect"
"github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/hugolib/sitesmatrix"
"github.com/gohugoio/hugo/markup"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/output"
"github.com/gohugoio/hugo/resources/kinds"
"github.com/gohugoio/hugo/resources/page"
"github.com/gohugoio/hugo/resources/resource"
"github.com/mitchellh/mapstructure"
"github.com/gohugoio/hugo/config"
"github.com/spf13/cast"
)
// DatesStrings holds the string representations of the four core page dates,
// used when encoding page meta data to JSON.
type DatesStrings struct {
	Date        string `json:"date"`
	Lastmod     string `json:"lastMod"`
	PublishDate string `json:"publishDate"`
	ExpiryDate  string `json:"expiryDate"`
}

// Dates holds the four core dates for a page.
type Dates struct {
	Date        time.Time
	Lastmod     time.Time
	PublishDate time.Time
	ExpiryDate  time.Time
}

// String returns a human readable representation of d, printing "<zero>" for
// unset dates and RFC 3339 otherwise.
func (d Dates) String() string {
	fmtDate := func(t time.Time) string {
		if t.IsZero() {
			return "<zero>"
		}
		return t.Format(time.RFC3339)
	}
	return fmt.Sprintf("Date: %s, Lastmod: %s, PublishDate: %s, ExpiryDate: %s",
		fmtDate(d.Date), fmtDate(d.Lastmod), fmtDate(d.PublishDate), fmtDate(d.ExpiryDate))
}

// IsDateOrLastModAfter reports whether d's Date or Lastmod is after the
// corresponding field in in.
func (d Dates) IsDateOrLastModAfter(in Dates) bool {
	return d.Date.After(in.Date) || d.Lastmod.After(in.Lastmod)
}

// UpdateDateAndLastmodAndPublishDateIfAfter updates Date, Lastmod and
// PublishDate from in for any field that is later than the current value.
// PublishDate is only updated when the incoming value is not in the future.
func (d *Dates) UpdateDateAndLastmodAndPublishDateIfAfter(in Dates) {
	if in.Date.After(d.Date) {
		d.Date = in.Date
	}
	if in.Lastmod.After(d.Lastmod) {
		d.Lastmod = in.Lastmod
	}
	if in.PublishDate.After(d.PublishDate) && in.PublishDate.Before(htime.Now()) {
		d.PublishDate = in.PublishDate
	}
}

// IsAllDatesZero reports whether all four dates are unset.
func (d Dates) IsAllDatesZero() bool {
	return d.Date.IsZero() && d.Lastmod.IsZero() && d.PublishDate.IsZero() && d.ExpiryDate.IsZero()
}
const (
	// Reserved front matter keys that are handled early, before the full
	// front matter is decoded.
	pageMetaKeySites  = "sites"
	pageMetaKeyMarkup = "markup"
)

// SetMetaPreFromMap sets the early page meta data from the given front matter
// map (which may be nil) and resolves the content type from the file
// extension ext.
func (pcfg *PageConfigEarly) SetMetaPreFromMap(ext string, frontmatter map[string]any, logger loggers.Logger, conf config.AllProvider) error {
	if frontmatter != nil {
		if err := pcfg.setFromFrontMatter(frontmatter); err != nil {
			return err
		}
	}
	return pcfg.resolveContentType(ext, conf.GetConfigSection("mediaTypes").(media.Types))
}

// setFromFrontMatter stores the raw front matter map and extracts the keys
// that must be handled early: "markup" and "sites".
func (pcfg *PageConfigEarly) setFromFrontMatter(frontmatter map[string]any) error {
	// Needed for case insensitive fetching of params values.
	maps.PrepareParams(frontmatter)
	pcfg.Frontmatter = frontmatter
	if v, found := frontmatter[pageMetaKeyMarkup]; found {
		pcfg.Content.Markup = cast.ToString(v)
	}
	if v, found := frontmatter[pageMetaKeySites]; found {
		if err := mapstructure.WeakDecode(v, &pcfg.Sites); err != nil {
			return fmt.Errorf("failed to decode sites from front matter: %w", err)
		}
	}
	return nil
}

// setCascadeEarlyValueIfNotSet applies one cascade value for key if not
// already set. It reports whether the early values are now complete, so the
// caller can stop applying further cascade fields.
// NOTE(review): value is assumed to be a maps.Params for the "sites" key
// (the type assertion below would panic otherwise); other keys are ignored.
func (p *PageConfigEarly) setCascadeEarlyValueIfNotSet(key string, value any) (done bool) {
	switch key {
	case pageMetaKeySites:
		p.Sites.SetFromParamsIfNotSet(value.(maps.Params))
	}
	return !p.Sites.Matrix.IsZero()
}
// PageConfigEarly holds page configuration that needs to be set early in the
// build, before the full front matter is decoded into PageConfigLate.
type PageConfigEarly struct {
	Kind            string // The kind of page, e.g. "page", "section", "home" etc. This is usually derived from the content path.
	Path            string // The canonical path to the page, e.g. /sect/mypage. Note: Leading slash, no trailing slash, no extensions or language identifiers.
	SourceEntryHash uint64 // The source entry hash for content adapters.
	Sites           sitesmatrix.Sites
	Content         Source      // Content holds the content for this page.
	Frontmatter     maps.Params `mapstructure:"-" json:"-"` // The original front matter or content adapter map.
	// Compiled values.
	SitesMatrixAndComplements `mapstructure:"-" json:"-"`
	IsFromContentAdapter      bool       `mapstructure:"-" json:"-"`
	ContentMediaType          media.Type `mapstructure:"-" json:"-"`
}

// PageConfigLate configures a Page, typically from front matter.
// Note that all the top level fields are reserved Hugo keywords.
// Any custom configuration needs to be set in the Params map.
type PageConfigLate struct {
	Dates Dates `json:"-"` // Dates holds the four core dates for this page.
	DatesStrings
	Params         maps.Params // User defined params.
	Title          string      // The title of the page.
	LinkTitle      string      // The link title of the page.
	Type           string      // The content type of the page.
	Layout         string      // The layout to use for to render this page.
	Weight         int         // The weight of the page, used in sorting if set to a non-zero value.
	URL            string      // The URL to the rendered page, e.g. /sect/mypage.html.
	Slug           string      // The slug for this page.
	Description    string      // The description for this page.
	Summary        string      // The summary for this page.
	Draft          bool        // Whether or not the content is a draft.
	Headless       bool        `json:"-"` // Whether or not the page should be rendered.
	IsCJKLanguage  bool        // Whether or not the content is in a CJK language.
	TranslationKey string      // The translation key for this page.
	Keywords       []string    // The keywords for this page.
	Aliases        []string    // The aliases for this page.
	Outputs        []string    // The output formats to render this page in. If not set, the site's configured output formats for this page kind will be used.
	FrontMatterOnlyValues `mapstructure:"-" json:"-"`
	Sitemap config.SitemapConfig
	Build   BuildConfig
	Menus   any // Can be a string, []string or map[string]any.
	// Set only for pages created from data files.
	ContentAdapterData map[string]any `mapstructure:"-" json:"-"`
	// Compiled values.
	ConfiguredOutputFormats output.Formats `mapstructure:"-" json:"-"`
}
// SitesMatrixAndComplements holds a sites matrix and a sites complements configuration.
type SitesMatrixAndComplements struct {
	SitesMatrix      sitesmatrix.VectorStore `mapstructure:"-" json:"-"`
	SitesComplements sitesmatrix.VectorStore `mapstructure:"-" json:"-"`
}

// MatchSiteVector checks whether the site vector matches the sites matrix.
func (p *SitesMatrixAndComplements) MatchSiteVector(siteVector sitesmatrix.Vector) bool {
	return p.SitesMatrix.HasAnyVector(siteVector)
}

// MatchLanguageCoarse checks whether the language dimension matches either
// the sites matrix or the sites complements.
func (p *SitesMatrixAndComplements) MatchLanguageCoarse(siteVector sitesmatrix.Vector) bool {
	i := siteVector.Language()
	return p.SitesMatrix.HasLanguage(i) || p.SitesComplements.HasLanguage(i)
}

// MatchRoleCoarse checks whether the role dimension matches either
// the sites matrix or the sites complements.
func (p *SitesMatrixAndComplements) MatchRoleCoarse(siteVector sitesmatrix.Vector) bool {
	i := siteVector.Role()
	return p.SitesMatrix.HasRole(i) || p.SitesComplements.HasRole(i)
}

// MatchVersionCoarse checks whether the version dimension matches either
// the sites matrix or the sites complements.
func (p *SitesMatrixAndComplements) MatchVersionCoarse(siteVector sitesmatrix.Vector) bool {
	i := siteVector.Version()
	return p.SitesMatrix.HasVersion(i) || p.SitesComplements.HasVersion(i)
}
// DefaultPageConfig returns a PageConfigLate with the default build config set.
func DefaultPageConfig() *PageConfigLate {
	return &PageConfigLate{
		Build: DefaultBuildConfig,
	}
}

// Init validates and normalizes the early page config.
// pagesFromData is true when the page comes from a content adapter; in that
// case the path is made relative, only the home page may have an empty path,
// and the markup must be given via mediaType rather than markup.
func (p *PageConfigEarly) Init(pagesFromData bool) error {
	if pagesFromData {
		p.Path = strings.TrimPrefix(p.Path, "/")
		if p.Path == "" && p.Kind != kinds.KindHome {
			return fmt.Errorf("empty path is reserved for the home page")
		}
		if p.Content.Markup != "" {
			return errors.New("markup must not be set, use mediaType")
		}
	}
	return nil
}

// Init is currently a no-op, kept for symmetry with PageConfigEarly.Init.
func (p *PageConfigLate) Init() error {
	return nil
}
// buildSitesComplementsFromSitesConfig builds the sites complements vector
// store from the page/resource sites config, falling back to the complements
// attached to the source file (fim) when no complement globs are configured.
func buildSitesComplementsFromSitesConfig(
	conf config.AllProvider,
	fim *hugofs.FileMeta,
	sitesConfig sitesmatrix.Sites,
) sitesmatrix.VectorStore {
	if sitesConfig.Complements.IsZero() {
		// Nothing configured; inherit from the file if available.
		if fim != nil && fim.SitesComplements != nil {
			return fim.SitesComplements
		}
		return sitesmatrix.NilStore
	}
	intsetsCfg := sitesmatrix.IntSetsConfig{
		Globs: sitesConfig.Complements,
	}
	sitesComplements := sitesmatrix.NewIntSetsBuilder(conf.ConfiguredDimensions()).WithConfig(intsetsCfg)
	if fim != nil && fim.SitesComplements != nil {
		// Fill in dimensions the globs left unset from the file's complements.
		sitesComplements.WithDimensionsFromOtherIfNotSet(fim.SitesComplements)
	}
	return sitesComplements.Build()
}

// buildSitesMatrixFromSitesConfig builds the sites matrix vector store from
// the configured matrix globs, using sitesMatrixBase (e.g. derived from the
// source file) both for unset dimensions and as the result when no globs are
// configured.
func buildSitesMatrixFromSitesConfig(
	conf config.AllProvider,
	sitesMatrixBase sitesmatrix.VectorIterator,
	sitesConfig sitesmatrix.Sites,
) sitesmatrix.VectorStore {
	if sitesConfig.Matrix.IsZero() && sitesMatrixBase != nil {
		// No matrix configured; use the base as-is.
		if sitesMatrixBase.LenVectors() == 1 {
			return conf.ConfiguredDimensions().GetOrCreateSingleVectorStore(sitesMatrixBase.VectorSample())
		}
		return sitesmatrix.VectorIteratorToStore(sitesMatrixBase)
	}
	if conf.ConfiguredDimensions().IsSingleVector() {
		// Only a single site is configured; its matrix is the only option.
		return conf.ConfiguredDimensions().CommonSitesMatrix.DefaultSite
	}
	intsetsCfg := sitesmatrix.IntSetsConfig{
		Globs: sitesConfig.Matrix,
	}
	sitesMatrixPage := sitesmatrix.NewIntSetsBuilder(conf.ConfiguredDimensions()).WithConfig(intsetsCfg)
	if sitesMatrixBase != nil {
		sitesMatrixPage.WithDimensionsFromOtherIfNotSet(sitesMatrixBase)
	}
	sitesMatrixPage.WithDefaultsIfNotSet()
	matrix := sitesMatrixPage.Build()
	return matrix
}
// CompileEarly applies cascades and builds the sites matrix and complements
// for the page, then resolves the content type. The order matters: cascades
// without site filtering are applied first, then the matrix is built, then
// site-filtered cascades are applied — and if those change the matrix config,
// the matrix is rebuilt.
func (p *PageConfigEarly) CompileEarly(pi *paths.Path, cascades *page.PageMatcherParamsConfigs,
	conf config.AllProvider, fim *hugofs.FileMeta, sitesMatrixBase sitesmatrix.VectorIterator, sitesMatrixBaseOnly bool,
) error {
	// First apply the cascades with no site filtering.
	for cascade := range cascades.All() {
		if cascade.Target.SitesMatrixCompiled != nil {
			continue
		}
		if !cascade.Target.Match(p.Kind, pi.Base(), conf.Environment(), p.SitesMatrix) {
			continue
		}
		for ck, cv := range cascade.Fields {
			if done := p.setCascadeEarlyValueIfNotSet(ck, cv); done {
				break
			}
		}
	}
	if sitesMatrixBaseOnly {
		p.SitesMatrix = sitesmatrix.VectorIteratorToStore(sitesMatrixBase)
	} else {
		p.SitesMatrix = buildSitesMatrixFromSitesConfig(
			conf,
			sitesMatrixBase,
			p.Sites,
		)
	}
	// Finally apply the cascades with site filtering.
	// These may also contain site specific settings,
	// so preserve the original matrix for comparison.
	sitesMatrixBefore := p.Sites.Matrix
	var hadCascadeMatch bool
	for cascade := range cascades.All() {
		if cascade.Target.SitesMatrixCompiled == nil {
			continue
		}
		if !cascade.Target.Match(p.Kind, pi.Base(), conf.Environment(), p.SitesMatrix) {
			continue
		}
		hadCascadeMatch = true
		for ck, cv := range cascade.Fields {
			if done := p.setCascadeEarlyValueIfNotSet(ck, cv); done {
				break
			}
		}
	}
	if hadCascadeMatch && !sitesMatrixBaseOnly && !sitesMatrixBefore.Equal(p.Sites.Matrix) {
		// Matrix has changed, rebuild.
		p.SitesMatrix = buildSitesMatrixFromSitesConfig(
			conf,
			sitesMatrixBase,
			p.Sites,
		)
	}
	p.SitesComplements = buildSitesComplementsFromSitesConfig(
		conf,
		fim,
		p.Sites,
	)
	mediaTypes := conf.GetConfigSection("mediaTypes").(media.Types)
	if err := p.resolveContentType(pi.Ext(), mediaTypes); err != nil {
		return err
	}
	return nil
}
// CompileForPagesFromDataPre normalizes the path and kind for pages created
// from data files (content adapters), then resolves the content type.
func (p *PageConfigEarly) CompileForPagesFromDataPre(basePath string, logger loggers.Logger, mediaTypes media.Types) error {
	// In content adapters, we always get relative paths.
	if basePath != "" {
		p.Path = path.Join(basePath, p.Path)
	}
	if p.Kind == "" {
		p.Kind = kinds.KindPage
	}
	// Note that NormalizePathStringBasic will make sure that we don't preserve the unnormalized path.
	// We do that when we create pages from the file system; mostly for backward compatibility,
	// but also because people tend to use the filename to name their resources (with spaces and all),
	// and this isn't relevant when creating resources from an API where it's easy to add textual meta data.
	p.Path = paths.NormalizePathStringBasic(p.Path)
	return p.resolveContentType("", mediaTypes)
}

// resolveContentType resolves ContentMediaType (and, if unset, Content.Markup)
// from, in order: the file extension ext (defaulting to "md" when neither
// markup nor media type is set), then the configured Content.MediaType, then
// Content.Markup.
func (p *PageConfigEarly) resolveContentType(ext string, mediaTypes media.Types) error {
	if p.Content.Markup == "" && p.Content.MediaType == "" {
		if ext == "" {
			ext = "md"
		}
		p.ContentMediaType = MarkupToMediaType(ext, mediaTypes)
		if p.ContentMediaType.IsZero() {
			return fmt.Errorf("failed to resolve media type for suffix %q", ext)
		}
	}
	// s remembers the last identifier tried, for the error message below.
	var s string
	if p.ContentMediaType.IsZero() {
		if p.Content.MediaType != "" {
			s = p.Content.MediaType
			p.ContentMediaType, _ = mediaTypes.GetByType(s)
		}
		if p.ContentMediaType.IsZero() && p.Content.Markup != "" {
			s = p.Content.Markup
			p.ContentMediaType = MarkupToMediaType(s, mediaTypes)
		}
	}
	if p.ContentMediaType.IsZero() {
		return fmt.Errorf("failed to resolve media type for %q", s)
	}
	if p.Content.Markup == "" {
		// Default the markup to the media type's sub type.
		p.Content.Markup = p.ContentMediaType.SubType
	}
	return nil
}
// Compile sets up the page configuration after all fields have been set.
// For pages created via content adapters it first decodes the raw adapter
// data into this struct, then normalizes Params and resolves the configured
// output formats against the site's output formats.
func (p *PageConfigLate) Compile(e *PageConfigEarly, logger loggers.Logger, outputFormats output.Formats) error {
	if e.IsFromContentAdapter {
		// Content adapter pages carry their settings in a raw map.
		if err := mapstructure.WeakDecode(p.ContentAdapterData, p); err != nil {
			return fmt.Errorf("failed to decode page map: %w", err)
		}
	}
	if p.Params == nil {
		p.Params = make(maps.Params)
	} else {
		// Normalize keys for case insensitive lookups.
		maps.PrepareParams(p.Params)
	}
	if len(p.Outputs) > 0 {
		outFormats, err := outputFormats.GetByNames(p.Outputs...)
		if err != nil {
			return fmt.Errorf("failed to resolve output formats %v: %w", p.Outputs, err)
		}
		// The original had this assignment in a redundant `else` branch
		// after the return above (indent-error-flow); behavior is unchanged.
		p.ConfiguredOutputFormats = outFormats
	}
	return nil
}
// MarkupToMediaType converts a markup string (e.g. "md") to a media type,
// resolving markup aliases first and matching case-insensitively.
func MarkupToMediaType(s string, mediaTypes media.Types) media.Type {
	s = strings.ToLower(s)
	mt, _ := mediaTypes.GetBestMatch(markup.ResolveMarkup(s))
	return mt
}

// ResourceConfig configures a page resource, typically from a content adapter.
type ResourceConfig struct {
	Path    string
	Name    string
	Title   string
	Params  maps.Params
	Content Source
	Sites   sitesmatrix.Sites
	// Compiled values.
	ContentAdapterSourceEntryHash uint64      `mapstructure:"-" json:"-"`
	PathInfo                      *paths.Path `mapstructure:"-" json:"-"`
	ContentMediaType              media.Type  `mapstructure:"-" json:"-"`
	SitesMatrixAndComplements     `mapstructure:"-" json:"-"`
}

// Validate checks the invariants that must hold before Compile is called.
func (rc *ResourceConfig) Validate() error {
	if rc.Content.Markup != "" {
		return errors.New("markup must not be set, use mediaType")
	}
	return nil
}
// Compile normalizes the resource path, resolves its media type and builds
// the sites matrix/complements, optionally inheriting dimensions from the
// source file meta (fim), which may be nil.
func (rc *ResourceConfig) Compile(basePath string, fim hugofs.FileMetaInfo, conf config.AllProvider, mediaTypes media.Types) error {
	if rc.Params != nil {
		maps.PrepareParams(rc.Params)
	}
	// Note that NormalizePathStringBasic will make sure that we don't preserve the unnormalized path.
	// We do that when we create resources from the file system; mostly for backward compatibility,
	// but also because people tend to use the filename to name their resources (with spaces and all),
	// and this isn't relevant when creating resources from an API where it's easy to add textual meta data.
	rc.Path = paths.NormalizePathStringBasic(path.Join(basePath, rc.Path))
	rc.PathInfo = conf.PathParser().Parse(files.ComponentFolderContent, rc.Path)
	if rc.Content.MediaType != "" {
		var found bool
		rc.ContentMediaType, found = mediaTypes.GetByType(rc.Content.MediaType)
		if !found {
			return fmt.Errorf("media type %q not found", rc.Content.MediaType)
		}
	}
	// Resolve the file meta once and guard consistently against a nil fim:
	// the original nil-checked fim before reading SitesMatrix but then
	// dereferenced it unconditionally (fim.Meta()) for the complements,
	// which would panic for resources with no backing file.
	var fimMeta *hugofs.FileMeta
	if fim != nil {
		fimMeta = fim.Meta()
	}
	var sitesMatrixFile sitesmatrix.VectorStore
	if fimMeta != nil {
		sitesMatrixFile = fimMeta.SitesMatrix
	}
	rc.SitesMatrix = buildSitesMatrixFromSitesConfig(
		conf,
		sitesMatrixFile,
		rc.Sites,
	)
	// buildSitesComplementsFromSitesConfig handles a nil *hugofs.FileMeta.
	rc.SitesComplements = buildSitesComplementsFromSitesConfig(
		conf,
		fimMeta,
		rc.Sites,
	)
	return nil
}
// Source holds a content source: the media type or markup describing it, and
// the value itself, which may be a string, a resource.Resource or anything
// convertible to a string.
type Source struct {
	// MediaType is the media type of the content.
	MediaType string
	// The markup used in Value. Only used in front matter.
	Markup string
	// The content.
	Value any
}

// IsZero reports whether the source has no usable content value.
func (s Source) IsZero() bool {
	return !hreflect.IsTruthful(s.Value)
}

// IsResourceValue reports whether Value is a resource.Resource.
func (s Source) IsResourceValue() bool {
	_, ok := s.Value.(resource.Resource)
	return ok
}

// ValueAsString returns Value converted to a string; nil converts to "".
// It panics if the conversion fails, so callers should check
// IsResourceValue first for resource-backed values.
func (s Source) ValueAsString() string {
	if s.Value == nil {
		return ""
	}
	ss, err := cast.ToStringE(s.Value)
	if err != nil {
		panic(fmt.Errorf("content source: failed to convert %T to string: %s", s.Value, err))
	}
	return ss
}

// ValueAsOpenReadSeekCloser returns the string value wrapped in an
// OpenReadSeekCloser for streaming reads.
func (s Source) ValueAsOpenReadSeekCloser() hugio.OpenReadSeekCloser {
	return hugio.NewOpenReadSeekCloser(hugio.NewReadSeekerNoOpCloserFromString(s.ValueAsString()))
}

// FrontMatterOnlyValues holds values that can only be set via front matter.
type FrontMatterOnlyValues struct {
	// ResourcesMeta holds the raw "resources" front matter entries.
	ResourcesMeta []map[string]any
}
// FrontMatterHandler maps front matter into Page fields and .Params.
// Note that we currently have only extracted the date logic.
type FrontMatterHandler struct {
	fmConfig FrontmatterConfig
	// contentAdapterDatesHandler handles dates for pages created via
	// content adapters, where values are already decoded.
	contentAdapterDatesHandler func(d *FrontMatterDescriptor) error
	dateHandler                frontMatterFieldHandler
	lastModHandler             frontMatterFieldHandler
	publishDateHandler         frontMatterFieldHandler
	expiryDateHandler          frontMatterFieldHandler
	// A map of all date keys configured, including any custom.
	allDateKeys map[string]bool
	logger      loggers.Logger
}

// FrontMatterDescriptor describes how to handle front matter for a given Page.
// It has pointers to values in the receiving page which gets updated.
type FrontMatterDescriptor struct {
	// This is the Page's base filename (BaseFilename), e.g. page.md., or
	// if page is a leaf bundle, the bundle folder name (ContentBaseName).
	BaseFilename string
	// The Page's path if the page is backed by a file, else its title.
	PathOrTitle string
	// The content file's mod time.
	ModTime time.Time
	// May be set from the author date in Git.
	GitAuthorDate time.Time
	PageConfigEarly *PageConfigEarly
	// The below will be modified.
	PageConfigLate *PageConfigLate
	Location *time.Location // The Location to use to parse dates without time zone info.
}

// dateFieldAliases maps each canonical date front matter key to the aliases
// also accepted for it, e.g. "published" for "publishdate".
var dateFieldAliases = map[string][]string{
	fmDate:       {},
	fmLastmod:    {"modified"},
	fmPubDate:    {"pubdate", "published"},
	fmExpiryDate: {"unpublishdate"},
}
// HandleDates updates all the dates given the current configuration and the
// supplied front matter params. Note that this requires all lower-case keys
// in the params map.
func (f FrontMatterHandler) HandleDates(d *FrontMatterDescriptor) error {
	if d.PageConfigLate == nil {
		panic("missing pageConfig")
	}
	if d.PageConfigEarly.IsFromContentAdapter {
		// Content adapter dates are already decoded; use the dedicated handler.
		if f.contentAdapterDatesHandler == nil {
			panic("missing content adapter date handler")
		}
		return f.contentAdapterDatesHandler(d)
	}
	if f.dateHandler == nil {
		panic("missing date handler")
	}
	if _, err := f.dateHandler(d); err != nil {
		return err
	}
	if _, err := f.lastModHandler(d); err != nil {
		return err
	}
	if _, err := f.publishDateHandler(d); err != nil {
		return err
	}
	if _, err := f.expiryDateHandler(d); err != nil {
		return err
	}
	return nil
}

// IsDateKey returns whether the given front matter key is considered a date by the current
// configuration.
func (f FrontMatterHandler) IsDateKey(key string) bool {
	return f.allDateKeys[key]
}
// dateAndSlugFromBaseFilename returns a time.Time value (resolved to the
// default system location) and a slug, extracted by parsing the provided path.
// Parsing supports YYYY-MM-DD-HH-MM-SS and YYYY-MM-DD date/time formats.
// Within the YYYY-MM-DD-HH-MM-SS format, the date and time values may be
// separated by any character including a space (e.g., YYYY-MM-DD HH-MM-SS).
// If no date prefix is found, the zero time and an empty slug are returned.
func dateAndSlugFromBaseFilename(location *time.Location, path string) (time.Time, string) {
	base, _ := paths.FileAndExt(path)
	if len(base) < 10 {
		// Not long enough to start with a YYYY-MM-DD date.
		return time.Time{}, ""
	}
	// Delimiters allowed between the date and the slug.
	delimiters := " -_"
	if len(base) >= 19 {
		// Attempt to parse a YYYY-MM-DD-HH-MM-SS date-time prefix.
		// The character at index 10 is deliberately skipped: any single
		// separator between the date and time parts is accepted.
		ds := base[:10]
		ts := strings.ReplaceAll(base[11:19], "-", ":")
		d, err := htime.ToTimeInDefaultLocationE(ds+"T"+ts, location)
		if err == nil {
			return d, strings.Trim(base[19:], delimiters)
		}
	}
	// Attempt to parse a YYYY-MM-DD date prefix.
	ds := base[:10]
	d, err := htime.ToTimeInDefaultLocationE(ds, location)
	if err == nil {
		return d, strings.Trim(base[10:], delimiters)
	}
	// If no date is defined, return the zero time instant.
	return time.Time{}, ""
}
// frontMatterFieldHandler attempts to set one page field from d.
// It returns true when the field was set.
type frontMatterFieldHandler func(d *FrontMatterDescriptor) (bool, error)

// newChainedFrontMatterFieldHandler combines handlers so that the first one
// that succeeds wins. Handler errors are logged rather than returned, so
// later handlers in the chain still get a chance to run.
func (f FrontMatterHandler) newChainedFrontMatterFieldHandler(handlers ...frontMatterFieldHandler) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		for _, h := range handlers {
			// First successful handler wins.
			success, err := h(d)
			if err != nil {
				f.logger.Errorln(err)
			} else if success {
				return true, nil
			}
		}
		return false, nil
	}
}
// FrontmatterConfig holds the ordered handler identifier lists that control
// how each of the four core dates is resolved from front matter.
type FrontmatterConfig struct {
	// Controls how the Date is set from front matter.
	Date []string
	// Controls how the Lastmod is set from front matter.
	Lastmod []string
	// Controls how the PublishDate is set from front matter.
	PublishDate []string
	// Controls how the ExpiryDate is set from front matter.
	ExpiryDate []string
}

const (
	// These are all the date handler identifiers.
	// All identifiers not starting with a ":" map to a front matter parameter.
	fmDate       = "date"
	fmPubDate    = "publishdate"
	fmLastmod    = "lastmod"
	fmExpiryDate = "expirydate"
	// Gets date from filename, e.g. 2018-02-22-mypage.md
	fmFilename = ":filename"
	// Gets date from file OS mod time.
	fmModTime = ":filemodtime"
	// Gets date from Git
	fmGitAuthorDate = ":git"
)

// This is the config you get when doing nothing.
func newDefaultFrontmatterConfig() FrontmatterConfig {
	return FrontmatterConfig{
		Date:        []string{fmDate, fmPubDate, fmLastmod},
		Lastmod:     []string{fmGitAuthorDate, fmLastmod, fmDate, fmPubDate},
		PublishDate: []string{fmPubDate, fmDate},
		ExpiryDate:  []string{fmExpiryDate},
	}
}
// DecodeFrontMatterConfig decodes the "frontmatter" config section, expanding
// any ":default" placeholder with the built-in defaults and adding the date
// field aliases (e.g. "published" for "publishdate").
func DecodeFrontMatterConfig(cfg config.Provider) (FrontmatterConfig, error) {
	c := newDefaultFrontmatterConfig()
	// Keep a copy of the defaults for the ":default" expansion below.
	defaultConfig := c
	if cfg.IsSet("frontmatter") {
		fm := cfg.GetStringMap("frontmatter")
		for k, v := range fm {
			loki := strings.ToLower(k)
			switch loki {
			case fmDate:
				c.Date = toLowerSlice(v)
			case fmPubDate:
				c.PublishDate = toLowerSlice(v)
			case fmLastmod:
				c.Lastmod = toLowerSlice(v)
			case fmExpiryDate:
				c.ExpiryDate = toLowerSlice(v)
			}
		}
	}
	expander := func(c, d []string) []string {
		out := expandDefaultValues(c, d)
		out = addDateFieldAliases(out)
		return out
	}
	c.Date = expander(c.Date, defaultConfig.Date)
	c.PublishDate = expander(c.PublishDate, defaultConfig.PublishDate)
	c.Lastmod = expander(c.Lastmod, defaultConfig.Lastmod)
	c.ExpiryDate = expander(c.ExpiryDate, defaultConfig.ExpiryDate)
	return c, nil
}
// addDateFieldAliases expands values with the configured aliases for each
// known date key, deduplicating while preserving first-seen order.
func addDateFieldAliases(values []string) []string {
	var complete []string
	for _, v := range values {
		complete = append(complete, v)
		if aliases, found := dateFieldAliases[v]; found {
			complete = append(complete, aliases...)
		}
	}
	return hstrings.UniqueStringsReuse(complete)
}
// expandDefaultValues returns values with every ":default" placeholder
// replaced in place by the full defaults slice, preserving order.
// A nil/empty values slice yields a nil result.
func expandDefaultValues(values []string, defaults []string) []string {
	var expanded []string
	for _, value := range values {
		switch value {
		case ":default":
			expanded = append(expanded, defaults...)
		default:
			expanded = append(expanded, value)
		}
	}
	return expanded
}
// toLowerSlice converts in to a []string with every entry lower-cased.
func toLowerSlice(in any) []string {
	out := cast.ToStringSlice(in)
	for i := range out {
		out[i] = strings.ToLower(out[i])
	}
	return out
}
// NewFrontmatterHandler creates a new FrontMatterHandler with the given logger and configuration.
// If no logger is provided, one will be created.
func NewFrontmatterHandler(logger loggers.Logger, frontMatterConfig FrontmatterConfig) (FrontMatterHandler, error) {
	if logger == nil {
		logger = loggers.NewDefault()
	}
	// Collect every plain front matter key; ":"-prefixed identifiers are
	// special handlers (filename, mod time, Git), not front matter fields.
	allDateKeys := make(map[string]bool)
	addKeys := func(vals []string) {
		for _, k := range vals {
			if !strings.HasPrefix(k, ":") {
				allDateKeys[k] = true
			}
		}
	}
	addKeys(frontMatterConfig.Date)
	addKeys(frontMatterConfig.ExpiryDate)
	addKeys(frontMatterConfig.Lastmod)
	addKeys(frontMatterConfig.PublishDate)
	f := FrontMatterHandler{logger: logger, fmConfig: frontMatterConfig, allDateKeys: allDateKeys}
	if err := f.createHandlers(); err != nil {
		return f, err
	}
	return f, nil
}
// createHandlers wires up the per-date handler chains from the configuration.
// Each setter writes the resolved time into the page config's Dates and
// mirrors it into Params under the canonical key unless already present.
func (f *FrontMatterHandler) createHandlers() error {
	var err error
	if f.contentAdapterDatesHandler, err = f.createContentAdapterDatesHandler(f.fmConfig); err != nil {
		return err
	}
	if f.dateHandler, err = f.createDateHandler(f.fmConfig.Date,
		func(d *FrontMatterDescriptor, t time.Time) {
			d.PageConfigLate.Dates.Date = t
			setParamIfNotSet(fmDate, t, d)
		}); err != nil {
		return err
	}
	if f.lastModHandler, err = f.createDateHandler(f.fmConfig.Lastmod,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmLastmod, t, d)
			d.PageConfigLate.Dates.Lastmod = t
		}); err != nil {
		return err
	}
	if f.publishDateHandler, err = f.createDateHandler(f.fmConfig.PublishDate,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmPubDate, t, d)
			d.PageConfigLate.Dates.PublishDate = t
		}); err != nil {
		return err
	}
	if f.expiryDateHandler, err = f.createDateHandler(f.fmConfig.ExpiryDate,
		func(d *FrontMatterDescriptor, t time.Time) {
			setParamIfNotSet(fmExpiryDate, t, d)
			d.PageConfigLate.Dates.ExpiryDate = t
		}); err != nil {
		return err
	}
	return nil
}

// setParamIfNotSet writes value to Params[key] unless the key already exists.
func setParamIfNotSet(key string, value any, d *FrontMatterDescriptor) {
	if _, found := d.PageConfigLate.Params[key]; found {
		return
	}
	d.PageConfigLate.Params[key] = value
}
// createContentAdapterDatesHandler returns the dates handler used for pages
// created via content adapters. For these pages the date values are already
// decoded into PageConfigLate.Dates, so this handler only resolves each
// configured date from the other date fields, in the configured priority
// order; special ":"-prefixed identifiers are skipped.
func (f FrontMatterHandler) createContentAdapterDatesHandler(fmcfg FrontmatterConfig) (func(d *FrontMatterDescriptor) error, error) {
	setTime := func(key string, value time.Time, in *PageConfigLate) {
		switch key {
		case fmDate:
			in.Dates.Date = value
		case fmLastmod:
			in.Dates.Lastmod = value
		case fmPubDate:
			in.Dates.PublishDate = value
		case fmExpiryDate:
			in.Dates.ExpiryDate = value
		}
	}
	getTime := func(key string, in *PageConfigLate) time.Time {
		switch key {
		case fmDate:
			return in.Dates.Date
		case fmLastmod:
			return in.Dates.Lastmod
		case fmPubDate:
			return in.Dates.PublishDate
		case fmExpiryDate:
			return in.Dates.ExpiryDate
		}
		return time.Time{}
	}
	// createSetter builds a function that sets the date field named by date
	// to the first non-zero value found among identifiers, in order.
	createSetter := func(identifiers []string, date string) func(pcfg *PageConfigLate) {
		var getTimes []func(in *PageConfigLate) time.Time
		for _, identifier := range identifiers {
			if strings.HasPrefix(identifier, ":") {
				// Filename/mod time/Git sources don't apply to adapters.
				continue
			}
			switch identifier {
			case fmDate:
				getTimes = append(getTimes, func(in *PageConfigLate) time.Time {
					return getTime(fmDate, in)
				})
			case fmLastmod:
				getTimes = append(getTimes, func(in *PageConfigLate) time.Time {
					return getTime(fmLastmod, in)
				})
			case fmPubDate:
				getTimes = append(getTimes, func(in *PageConfigLate) time.Time {
					return getTime(fmPubDate, in)
				})
			case fmExpiryDate:
				getTimes = append(getTimes, func(in *PageConfigLate) time.Time {
					return getTime(fmExpiryDate, in)
				})
			}
		}
		return func(pcfg *PageConfigLate) {
			for _, get := range getTimes {
				if t := get(pcfg); !t.IsZero() {
					setTime(date, t, pcfg)
					return
				}
			}
		}
	}
	setDate := createSetter(fmcfg.Date, fmDate)
	setLastmod := createSetter(fmcfg.Lastmod, fmLastmod)
	setPublishDate := createSetter(fmcfg.PublishDate, fmPubDate)
	setExpiryDate := createSetter(fmcfg.ExpiryDate, fmExpiryDate)
	fn := func(d *FrontMatterDescriptor) error {
		pcfg := d.PageConfigLate
		setDate(pcfg)
		setLastmod(pcfg)
		setPublishDate(pcfg)
		setExpiryDate(pcfg)
		return nil
	}
	return fn, nil
}
// createDateHandler builds a chained handler for the given ordered handler
// identifiers: ":"-prefixed identifiers map to the filename/mod time/Git
// handlers, anything else to a front matter field handler for that key.
func (f FrontMatterHandler) createDateHandler(identifiers []string, setter func(d *FrontMatterDescriptor, t time.Time)) (frontMatterFieldHandler, error) {
	// h is a nil method receiver; the constructors below don't dereference it.
	var h *frontmatterFieldHandlers
	var handlers []frontMatterFieldHandler
	for _, identifier := range identifiers {
		switch identifier {
		case fmFilename:
			handlers = append(handlers, h.newDateFilenameHandler(setter))
		case fmModTime:
			handlers = append(handlers, h.newDateModTimeHandler(setter))
		case fmGitAuthorDate:
			handlers = append(handlers, h.newDateGitAuthorDateHandler(setter))
		default:
			handlers = append(handlers, h.newDateFieldHandler(identifier, setter))
		}
	}
	return f.newChainedFrontMatterFieldHandler(handlers...), nil
}

// frontmatterFieldHandlers is a method namespace for the date handler
// constructors; its value is never used.
type frontmatterFieldHandlers int
// newDateFieldHandler returns a handler that resolves a date from the front
// matter field key (or from Params if a previous handler already copied it
// there), parses it in d.Location if needed, stores the parsed value back in
// Params and applies it via setter. It reports false when the field is
// missing or empty.
func (f *frontmatterFieldHandlers) newDateFieldHandler(key string, setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		v, found := d.PageConfigEarly.Frontmatter[key]
		if found {
			d.PageConfigLate.Params[key] = v
		} else {
			// Reentry from previous handlers.
			v, found = d.PageConfigLate.Params[key]
		}
		if !found || v == "" || v == nil {
			return false, nil
		}
		var date time.Time
		// NOTE(review): the Location is compared by pointer identity here;
		// time.Time values already in the target location are used as-is.
		if vt, ok := v.(time.Time); ok && vt.Location() == d.Location {
			date = vt
		} else {
			var err error
			date, err = htime.ToTimeInDefaultLocationE(v, d.Location)
			if err != nil {
				return false, fmt.Errorf("the %q front matter field is not a parsable date: see %s", key, d.PathOrTitle)
			}
			d.PageConfigLate.Params[key] = date
		}
		// We map several date keys to one, so, for example,
		// "expirydate", "unpublishdate" will all set .ExpiryDate (first found).
		setter(d, date)
		return true, nil
	}
}
// newDateFilenameHandler returns a handler that extracts the date (and,
// unless front matter provides one, the slug) from the page's base filename.
func (f *frontmatterFieldHandlers) newDateFilenameHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		date, slug := dateAndSlugFromBaseFilename(d.Location, d.BaseFilename)
		if date.IsZero() {
			return false, nil
		}
		setter(d, date)
		if _, found := d.PageConfigEarly.Frontmatter["slug"]; !found {
			// Use slug from filename
			d.PageConfigLate.Slug = slug
		}
		return true, nil
	}
}

// newDateModTimeHandler returns a handler that uses the content file's
// modification time, when available.
func (f *frontmatterFieldHandlers) newDateModTimeHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		if d.ModTime.IsZero() {
			return false, nil
		}
		setter(d, d.ModTime)
		return true, nil
	}
}

// newDateGitAuthorDateHandler returns a handler that uses the Git author
// date, when available.
func (f *frontmatterFieldHandlers) newDateGitAuthorDateHandler(setter func(d *FrontMatterDescriptor, t time.Time)) frontMatterFieldHandler {
	return func(d *FrontMatterDescriptor) (bool, error) {
		if d.GitAuthorDate.IsZero() {
			return false, nil
		}
		setter(d, d.GitAuthorDate)
		return true, nil
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/pagemeta/pagemeta_integration_test.go | resources/page/pagemeta/pagemeta_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pagemeta_test
import (
"strings"
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestLastModEq verifies that when only "date" is set in front matter,
// .Lastmod carries the same value as .Date and the two compare equal via eq.
func TestLastModEq(t *testing.T) {
files := `
-- hugo.toml --
timeZone = "Europe/London"
-- content/p1.md --
---
title: p1
date: 2024-03-13T06:00:00
---
-- layouts/single.html --
Date: {{ .Date }}
Lastmod: {{ .Lastmod }}
Eq: {{ eq .Date .Lastmod }}
`
b := hugolib.Test(t, files)
b.AssertFileContent("public/p1/index.html", `
Date: 2024-03-13 06:00:00 +0000 GMT
Lastmod: 2024-03-13 06:00:00 +0000 GMT
Eq: true
`)
}
// TestDateValidation exercises parsing of the "date" front matter field across
// TOML, JSON and YAML: valid values (native TOML date, date string, empty
// string, int, null) must render the expected date, while invalid values
// (malformed string, bool, float) must produce the parse error in the log.
func TestDateValidation(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
-- content/_index.md --
FRONT_MATTER
-- layouts/home.html --
{{ .Date.UTC.Format "2006-01-02" }}
--
`
errorMsg := `ERROR the "date" front matter field is not a parsable date`
// TOML: unquoted date/time (valid)
f := strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = 2024-10-01
+++
`)
b := hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "2024-10-01")
// TOML: string (valid)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = "2024-10-01"
+++
`)
b = hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "2024-10-01")
// TOML: empty string (valid; renders the zero time.Time)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = ""
+++
`)
b = hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "0001-01-01")
// TOML: int (valid; interpreted as a Unix timestamp)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = 0
+++
`)
b = hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "1970-01-01")
// TOML: string (invalid)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = "2024-42-42"
+++
`)
b, _ = hugolib.TestE(t, f)
b.AssertLogContains(errorMsg)
// TOML: bool (invalid)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = true
+++
`)
b, _ = hugolib.TestE(t, f)
b.AssertLogContains(errorMsg)
// TOML: float (invalid)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
+++
date = 6.7
+++
`)
b, _ = hugolib.TestE(t, f)
b.AssertLogContains(errorMsg)
// JSON: null (valid; renders the zero time.Time)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
{
"date": null
}
`)
b = hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "0001-01-01")
// YAML: null (valid; renders the zero time.Time)
f = strings.ReplaceAll(files, "FRONT_MATTER", `
---
date:
---
`)
b = hugolib.Test(t, f)
b.AssertFileContent("public/index.html", "0001-01-01")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/page/siteidentities/identities.go | resources/page/siteidentities/identities.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package siteidentities
import (
"github.com/gohugoio/hugo/identity"
)
const (
// Data identifies site.Data in dependency/change tracking.
// The change detection in /data is currently very coarse grained.
Data = identity.StringIdentity("site.Data")
)
// FromString returns the site identity matching the given name.
// The second return value is false (with identity.Anonymous) when there is no match.
func FromString(name string) (identity.Identity, bool) {
	if name == "Data" {
		return Data, true
	}
	return identity.Anonymous, false
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/text.go | resources/images/text.go | // Copyright 2021 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/color"
"image/draw"
"io"
"strings"
"github.com/disintegration/gift"
"github.com/gohugoio/hugo/common/hugio"
"golang.org/x/image/font"
"golang.org/x/image/font/gofont/goregular"
"golang.org/x/image/font/opentype"
"golang.org/x/image/math/fixed"
)
var _ gift.Filter = (*textFilter)(nil)
// textFilter is a gift.Filter that draws word-wrapped text on top of an image.
type textFilter struct {
text string // text to render; may contain "\n" line breaks
color color.Color // text color
x, y int // anchor position in pixels
alignx string // horizontal alignment relative to x: "left", "center" or "right"
aligny string // vertical alignment relative to y: "top", "center" or "bottom"
size float64 // font size (points, rendered at 72 DPI)
linespacing int // extra pixels inserted between lines
fontSource hugio.ReadSeekCloserProvider // optional font; Go Regular is used when nil
}
// Draw copies src into dst and then renders f.text over it, wrapping words to
// the width available between the anchor point and the image margins.
// It panics on font load/parse errors, as gift filters have no error return.
func (f textFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
// Load and parse font: a user-supplied source when set, else the bundled Go Regular.
ttf := goregular.TTF
if f.fontSource != nil {
rs, err := f.fontSource.ReadSeekCloser()
if err != nil {
panic(err)
}
defer rs.Close()
ttf, err = io.ReadAll(rs)
if err != nil {
panic(err)
}
}
otf, err := opentype.Parse(ttf)
if err != nil {
panic(err)
}
// Set font options
face, err := opentype.NewFace(otf, &opentype.FaceOptions{
Size: f.size,
DPI: 72,
Hinting: font.HintingNone,
})
if err != nil {
panic(err)
}
d := font.Drawer{
Dst: dst,
Src: image.NewUniform(f.color),
Face: face,
}
// Copy the source image into dst before drawing text on top of it.
gift.New().Draw(dst, src)
// Keep a fixed 20px margin on the right-hand side.
maxWidth := dst.Bounds().Dx() - 20
// Width available for a line of text, depending on which way text extends
// from the anchor x.
// NOTE(review): an unrecognized alignx leaves availableWidth at 0, which
// forces one word per line — presumably alignx is validated upstream; confirm.
var availableWidth int
switch f.alignx {
case "right":
availableWidth = f.x
case "center":
// Centered text grows symmetrically, so the usable width is twice the
// smaller distance from x to either margin.
availableWidth = min((maxWidth-f.x), f.x) * 2
case "left":
availableWidth = maxWidth - f.x
}
fontHeight := face.Metrics().Ascent.Ceil()
// Calculate lines, consider and include linebreaks
finalLines := []string{}
f.text = strings.ReplaceAll(f.text, "\r", "")
for line := range strings.SplitSeq(f.text, "\n") {
currentLine := ""
// Break each line at the maximum width.
for str := range strings.FieldsSeq(line) {
fieldStrWidth := font.MeasureString(face, str)
currentLineStrWidth := font.MeasureString(face, currentLine)
if (currentLineStrWidth.Ceil() + fieldStrWidth.Ceil()) >= availableWidth {
finalLines = append(finalLines, currentLine)
currentLine = ""
}
currentLine += str + " "
}
finalLines = append(finalLines, currentLine)
}
// Total height of the text from the top of the first line to the baseline of the last line
totalHeight := len(finalLines)*fontHeight + (len(finalLines)-1)*f.linespacing
// Correct y position based on font and size: y starts at the first baseline.
y := f.y + fontHeight
switch f.aligny {
case "top":
// Do nothing
case "center":
y = y - totalHeight/2
case "bottom":
y = y - totalHeight
}
// Draw text line by line
for _, line := range finalLines {
line = strings.TrimSpace(line)
strWidth := font.MeasureString(face, line)
var x int
switch f.alignx {
case "right":
x = f.x - strWidth.Ceil()
case "center":
x = f.x - (strWidth.Ceil() / 2)
case "left":
x = f.x
}
d.Dot = fixed.P(x, y)
d.DrawString(line)
y = y + fontHeight + f.linespacing
}
}
// Bounds reports the size of the result image, which equals the source size.
func (f textFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
	w, h := srcBounds.Dx(), srcBounds.Dy()
	return image.Rect(0, 0, w, h)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/images_golden_integration_test.go | resources/images/images_golden_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images_test
import (
_ "image/jpeg"
"strings"
"testing"
"github.com/gohugoio/hugo/resources/images/imagetesting"
)
// goldenProcess is a shared template snippet: it runs .Process with the given
// spec on the given image and publishes the result under a name derived
// (anchorized) from the spec, so each distinct spec yields a unique file.
const goldenProcess = `
{{ define "process"}}
{{ $img := .img.Process .spec }}
{{ $ext := path.Ext $img.RelPermalink }}
{{ $name := printf "images/%s%s" (.spec | anchorize) $ext }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
// Note, if you're enabling writeGoldenFiles on a MacOS ARM 64 you need to run the test with GOARCH=amd64, e.g. GOARCH=amd64 go test ./... .
// TestImagesGoldenFiltersMisc golden-tests a broad set of image filters
// (brightness, contrast, dither, gamma, blur, grayscale, colorize, color
// balance, hue, invert, opacity, overlay, padding, pixelate, auto-orient,
// saturation, sepia, sigmoid, text, unsharp mask), publishing each result
// under filters/misc for comparison against golden files.
func TestImagesGoldenFiltersMisc(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "filters/misc"
files := `
-- hugo.toml --
-- assets/rotate270.jpg --
sourcefilename: ../testdata/exif/orientation6.jpg
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- assets/gopher.png --
sourcefilename: ../testdata/gopher-hero8.png
-- layouts/home.html --
Home.
{{ $sunset := (resources.Get "sunset.jpg").Resize "x300" }}
{{ $sunsetGrayscale := $sunset.Filter (images.Grayscale) }}
{{ $gopher := (resources.Get "gopher.png").Resize "x80" }}
{{ $overlayFilter := images.Overlay $gopher 20 20 }}
{{ $textOpts := dict
"color" "#fbfaf5"
"linespacing" 8
"size" 40
"x" 25
"y" 190
}}
{{/* These are sorted. */}}
{{ template "filters" (dict "name" "brightness-40.jpg" "img" $sunset "filters" (images.Brightness 40)) }}
{{ template "filters" (dict "name" "contrast-50.jpg" "img" $sunset "filters" (images.Contrast 50)) }}
{{ template "filters" (dict "name" "dither-default.jpg" "img" $sunset "filters" (images.Dither)) }}
{{ template "filters" (dict "name" "gamma-1.667.jpg" "img" $sunset "filters" (images.Gamma 1.667)) }}
{{ template "filters" (dict "name" "gaussianblur-5.jpg" "img" $sunset "filters" (images.GaussianBlur 5)) }}
{{ template "filters" (dict "name" "grayscale.jpg" "img" $sunset "filters" (images.Grayscale)) }}
{{ template "filters" (dict "name" "grayscale+colorize-180-50-20.jpg" "img" $sunset "filters" (slice images.Grayscale (images.Colorize 180 50 20))) }}
{{ template "filters" (dict "name" "colorbalance-180-50-20.jpg" "img" $sunset "filters" (images.ColorBalance 180 50 20)) }}
{{ template "filters" (dict "name" "hue--15.jpg" "img" $sunset "filters" (images.Hue -15)) }}
{{ template "filters" (dict "name" "invert.jpg" "img" $sunset "filters" (images.Invert)) }}
{{ template "filters" (dict "name" "opacity-0.65.jpg" "img" $sunset "filters" (images.Opacity 0.65)) }}
{{ template "filters" (dict "name" "overlay-20-20.jpg" "img" $sunset "filters" ($overlayFilter)) }}
{{ template "filters" (dict "name" "padding-20-40-#976941.jpg" "img" $sunset "filters" (images.Padding 20 40 "#976941" )) }}
{{ template "filters" (dict "name" "pixelate-10.jpg" "img" $sunset "filters" (images.Pixelate 10)) }}
{{ template "filters" (dict "name" "rotate270.jpg" "img" (resources.Get "rotate270.jpg") "filters" images.AutoOrient) }}
{{ template "filters" (dict "name" "saturation-65.jpg" "img" $sunset "filters" (images.Saturation 65)) }}
{{ template "filters" (dict "name" "sepia-80.jpg" "img" $sunsetGrayscale "filters" (images.Sepia 80)) }}
{{ template "filters" (dict "name" "sigmoid-0.6--4.jpg" "img" $sunset "filters" (images.Sigmoid 0.6 -4 )) }}
{{ template "filters" (dict "name" "text.jpg" "img" $sunset "filters" (images.Text "Hugo Rocks!" $textOpts )) }}
{{ template "filters" (dict "name" "unsharpmask.jpg" "img" $sunset "filters" (images.UnsharpMask 10 0.4 0.03)) }}
{{ define "filters"}}
{{ if lt (len (path.Ext .name)) 4 }}
{{ errorf "No extension in %q" .name }}
{{ end }}
{{ $img := .img.Filter .filters }}
{{ $name := printf "images/%s" .name }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenFiltersMask golden-tests images.Mask combined with
// images.Process, including transparent PNG output and different background
// colors/sizes, publishing results under filters/mask.
func TestImagesGoldenFiltersMask(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "filters/mask"
files := `
-- hugo.toml --
[imaging]
bgColor = '#ebcc34'
hint = 'photo'
quality = 75
resampleFilter = 'Lanczos'
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- assets/mask.png --
sourcefilename: ../testdata/mask.png
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $mask := resources.Get "mask.png" }}
{{ template "mask" (dict "name" "transparant.png" "base" $sunset "mask" $mask) }}
{{ template "mask" (dict "name" "yellow.jpg" "base" $sunset "mask" $mask) }}
{{ template "mask" (dict "name" "wide.jpg" "base" $sunset "mask" $mask "spec" "resize 600x200") }}
{{/* This looks a little odd, but is correct and the recommended way to do this.
This will 1. Scale the image to x300, 2. Apply the mask, 3. Create the final image with background color #323ea.
It's possible to have multiple images.Process filters in the chain, but for the options for the final image (target format, bgGolor etc.),
the last entry will win.
*/}}
{{ template "mask" (dict "name" "blue.jpg" "base" $sunset "mask" $mask "spec" "resize x300 #323ea8") }}
{{ define "mask"}}
{{ $ext := path.Ext .name }}
{{ if lt (len (path.Ext .name)) 4 }}
{{ errorf "No extension in %q" .name }}
{{ end }}
{{ $format := strings.TrimPrefix "." $ext }}
{{ $spec := .spec | default (printf "resize x300 %s" $format) }}
{{ $filters := slice (images.Process $spec) (images.Mask .mask) }}
{{ $name := printf "images/%s" .name }}
{{ $img := .base.Filter $filters }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenFiltersMaskCacheIssues builds once to populate the image
// cache, then rebuilds with a changed background color, target name and mask
// resource, verifying the cached processed images are correctly invalidated
// and regenerated. See issues 13272, 13273.
func TestImagesGoldenFiltersMaskCacheIssues(t *testing.T) {
	if imagetesting.SkipGoldenTests {
		t.Skip("Skip golden test on this architecture")
	}
	// Will be used as the base folder for generated images.
	name := "filters/mask2"
	files := `
-- hugo.toml --
[caches]
[caches.images]
dir = ':cacheDir/golden_images'
maxAge = "30s"
[imaging]
bgColor = '#33ff44'
hint = 'photo'
quality = 75
resampleFilter = 'Lanczos'
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- assets/mask.png --
sourcefilename: ../testdata/mask.png
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $mask := resources.Get "mask.png" }}
{{ template "mask" (dict "name" "green.jpg" "base" $sunset "mask" $mask) }}
{{ define "mask"}}
{{ $ext := path.Ext .name }}
{{ if lt (len (path.Ext .name)) 4 }}
{{ errorf "No extension in %q" .name }}
{{ end }}
{{ $format := strings.TrimPrefix "." $ext }}
{{ $spec := .spec | default (printf "resize x300 %s" $format) }}
{{ $filters := slice (images.Process $spec) (images.Mask .mask) }}
{{ $name := printf "images/%s" .name }}
{{ $img := .base.Filter $filters }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
	// A stable working dir so the second run sees the first run's cache.
	tempDir := t.TempDir()
	opts := imagetesting.DefaultGoldenOpts
	opts.WorkingDir = tempDir
	opts.T = t
	opts.Name = name
	opts.Files = files
	// First build only warms the cache; golden assertions happen on the rebuild.
	opts.SkipAssertions = true
	imagetesting.RunGolden(opts)
	// Mutate the inputs: new background color, new target name, different mask.
	// strings.ReplaceAll is the idiomatic form of strings.Replace(s, old, new, -1).
	files = strings.ReplaceAll(files, "#33ff44", "#a83269")
	files = strings.ReplaceAll(files, "green", "pink")
	files = strings.ReplaceAll(files, "mask.png", "mask2.png")
	opts.Files = files
	opts.SkipAssertions = false
	opts.Rebuild = true
	imagetesting.RunGolden(opts)
}
// TestImagesGoldenFiltersText golden-tests the images.Text filter's alignment
// options (alignx left/center/right, aligny center/bottom) with long,
// word-wrapped strings, publishing results under filters/text.
func TestImagesGoldenFiltersText(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "filters/text"
files := `
-- hugo.toml --
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $textOpts := dict
"color" "#fbfaf5"
"linespacing" 8
"size" 28
"x" (div $sunset.Width 2 | int)
"y" (div $sunset.Height 2 | int)
"alignx" "center"
}}
{{ $text := "Pariatur deserunt sunt nisi sunt tempor quis eu. Sint et nulla enim officia sunt cupidatat. Eu amet ipsum qui velit cillum cillum ad Lorem in non ad aute." }}
{{ template "filters" (dict "name" "text_alignx-center.jpg" "img" $sunset "filters" (images.Text $text $textOpts )) }}
{{ $textOpts = (dict "alignx" "right") | merge $textOpts }}
{{ template "filters" (dict "name" "text_alignx-right.jpg" "img" $sunset "filters" (images.Text $text $textOpts )) }}
{{ $textOpts = (dict "alignx" "left") | merge $textOpts }}
{{ template "filters" (dict "name" "text_alignx-left.jpg" "img" $sunset "filters" (images.Text $text $textOpts )) }}
{{ $textOpts = (dict "alignx" "center" "aligny" "center") | merge $textOpts }}
{{ $text = "Est exercitation deserunt exercitation nostrud magna. Eiusmod anim deserunt sit elit dolore ea incididunt nisi. Ea ullamco excepteur voluptate occaecat duis pariatur proident cupidatat. Eu id esse qui consectetur commodo ad ex esse cupidatat velit duis cupidatat. Aliquip irure tempor consequat non amet in mollit ipsum officia tempor laborum." }}
{{ template "filters" (dict "name" "text_alignx-center_aligny-center.jpg" "img" $sunset "filters" (images.Text $text $textOpts )) }}
{{ $textOpts = (dict "alignx" "center" "aligny" "bottom") | merge $textOpts }}
{{ template "filters" (dict "name" "text_alignx-center_aligny-bottom.jpg" "img" $sunset "filters" (images.Text $text $textOpts )) }}
{{ define "filters"}}
{{ if lt (len (path.Ext .name)) 4 }}
{{ errorf "No extension in %q" .name }}
{{ end }}
{{ $img := .img.Filter .filters }}
{{ $name := printf "images/%s" .name }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
// opts.WriteFiles = true
// opts.DevMode = true
imagetesting.RunGolden(opts)
}
// TestImagesGoldenProcessMisc golden-tests the .Process method's crop/fill/fit
// smart variants, animated GIF resizing, rotation and format/background
// conversion, publishing results under process/misc.
func TestImagesGoldenProcessMisc(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "process/misc"
files := `
-- hugo.toml --
-- assets/giphy.gif --
sourcefilename: ../testdata/giphy.gif
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- assets/gopher.png --
sourcefilename: ../testdata/gopher-hero8.png
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $sunsetGrayscale := $sunset.Filter (images.Grayscale) }}
{{ $gopher := resources.Get "gopher.png" }}
{{ $giphy := resources.Get "giphy.gif" }}
{{/* These are sorted. The end file name will be created from the spec + extension, so make sure these are unique. */}}
{{ template "process" (dict "spec" "crop 500x200 smart" "img" $sunset) }}
{{ template "process" (dict "spec" "fill 500x200 smart" "img" $sunset) }}
{{ template "process" (dict "spec" "fit 500x200 smart" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 100x100 gif" "img" $giphy) }}
{{ template "process" (dict "spec" "resize 100x100 r180" "img" $gopher) }}
{{ template "process" (dict "spec" "resize 300x300 jpg #b31280" "img" $gopher) }}
` + goldenProcess
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenProcessWebP golden-tests WebP handling: decoding WebP
// sources (including animated and transparent ones), encoding to WebP at
// various quality levels and hints, and converting between WebP, GIF, PNG and
// JPEG, publishing results under process/webp.
func TestImagesGoldenProcessWebP(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "process/webp"
files := `
-- hugo.toml --
-- assets/highcontrast.webp --
sourcefilename: ../testdata/webp/highcontrast.webp
-- assets/anim.webp --
sourcefilename: ../testdata/webp/anim.webp
-- assets/fuzzycircle.webp --
sourcefilename: ../testdata/webp/fuzzy-cirlcle-transparent-32.webp
-- assets/fuzzycircle.png --
sourcefilename: ../testdata/fuzzy-cirlcle.png
-- assets/giphy.gif --
sourcefilename: ../testdata/giphy.gif
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- layouts/home.html --
Home.
{{ $fuzzyCircle := resources.Get "fuzzycircle.webp" }}
{{ $highContrast := resources.Get "highcontrast.webp" }}
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $sunsetGrayscale := $sunset.Filter (images.Grayscale) }}
{{ $animWebp := resources.Get "anim.webp" }}
{{ $giphy := resources.Get "giphy.gif" }}
{{/* These are sorted. The end file name will be created from the spec + extension, so make sure these are unique. */}}
{{ template "process" (dict "spec" "crop 300x300 gif" "img" $animWebp) }}
{{ template "process" (dict "spec" "crop 300x300 smart" "img" $fuzzyCircle) }}
{{ template "process" (dict "spec" "crop 300x300 smart #ff9999" "img" $fuzzyCircle) }}
{{ template "process" (dict "spec" "crop 300x300" "img" $animWebp) }}
{{ template "process" (dict "spec" "crop 500x200 smart webp" "img" $sunset) }}
{{ template "process" (dict "spec" "crop 500x200 smart webp" "img" $sunset) }}
{{ template "process" (dict "spec" "fit 300x400 webp" "img" $sunsetGrayscale) }}
{{ template "process" (dict "spec" "fit 400x500 webp" "img" $sunset) }}
{{ template "process" (dict "spec" "gif" "img" $highContrast) }}
{{ template "process" (dict "spec" "png" "img" $highContrast) }}
{{ template "process" (dict "spec" "resize 300x300" "img" $giphy) }}
{{ template "process" (dict "spec" "resize 300x300 webp" "img" $giphy) }}
{{ template "process" (dict "spec" "resize 300x300 webp lossless" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp q1" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp q33" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp q75" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp q100" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp drawing" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp icon" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp q50 drawing" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 400x" "img" $highContrast) }}
{{ define "process"}}
{{ $img := .img.Process .spec }}
{{ $ext := path.Ext $img.RelPermalink }}
{{ $name := printf "images/%s%s" (.spec | anchorize) $ext }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenWebPAnimation golden-tests resizing animated images in both
// directions between WebP and GIF, publishing results under webp/animation.
func TestImagesGoldenWebPAnimation(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "webp/animation"
files := `
-- hugo.toml --
disableKinds = ["page", "section", "taxonomy", "term", "sitemap", "robotsTXT", "404"]
-- assets/images/anim.webp --
sourcefilename: ../testdata/webp/anim.webp
-- assets/images/giphy.gif --
sourcefilename: ../testdata/giphy.gif
-- layouts/home.html --
Home.
{{ $webpAnim := resources.Get "images/anim.webp" }}
{{ $gifAnim := resources.Get "images/giphy.gif" }}
{{ ($webpAnim.Resize "100x100 webp").Publish }}
{{ ($webpAnim.Resize "100x100 gif").Publish }}
{{ ($gifAnim.Resize "100x100 gif").Publish }}
{{ ($gifAnim.Resize "100x100 webp").Publish }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenMethods golden-tests the Resize/Fill/Fit/Crop resource
// methods with various anchors, rotation and quality options, dispatching via
// the "invoke" template and publishing results under methods.
func TestImagesGoldenMethods(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
// Will be used as the base folder for generated images.
name := "methods"
files := `
-- hugo.toml --
[imaging]
bgColor = '#ebcc34'
hint = 'photo'
quality = 75
resampleFilter = 'MitchellNetravali'
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- assets/gopher.png --
sourcefilename: ../testdata/gopher-hero8.png
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ $gopher := resources.Get "gopher.png" }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "resize" "spec" "300x" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "resize" "spec" "x200" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "fill" "spec" "90x120 left" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "fill" "spec" "90x120 right" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "fit" "spec" "200x200" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "crop" "spec" "200x200" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "crop" "spec" "350x400 center" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "crop" "spec" "350x400 smart" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "crop" "spec" "350x400 center r90" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $sunset "method" "crop" "spec" "350x400 center q20" ) }}
{{ template "invoke" (dict "copyFormat" "png" "base" $gopher "method" "resize" "spec" "100x" ) }}
{{ template "invoke" (dict "copyFormat" "png" "base" $gopher "method" "resize" "spec" "100x #fc03ec" ) }}
{{ template "invoke" (dict "copyFormat" "jpg" "base" $gopher "method" "resize" "spec" "100x #03fc56 jpg" ) }}
{{ define "invoke"}}
{{ $spec := .spec }}
{{ $name := printf "images/%s-%s-%s.%s" .method ((trim .base.Name "/") | lower | anchorize) ($spec | anchorize) .copyFormat }}
{{ $img := ""}}
{{ if eq .method "resize" }}
{{ $img = .base.Resize $spec }}
{{ else if eq .method "fill" }}
{{ $img = .base.Fill $spec }}
{{ else if eq .method "fit" }}
{{ $img = .base.Fit $spec }}
{{ else if eq .method "crop" }}
{{ $img = .base.Crop $spec }}
{{ else }}
{{ errorf "Unknown method %q" .method }}
{{ end }}
{{ with $img | resources.Copy $name }}
{{ .Publish }}
{{ end }}
{{ end }}
`
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
// TestImagesGoldenConfigLossyVsQuality golden-tests the interplay between the
// site-wide quality and compression settings and per-spec overrides: with
// compression = "lossless" for WebP, quality applies to JPEG only unless a
// spec requests lossy WebP. Results are published under losslessvsquality.
func TestImagesGoldenConfigLossyVsQuality(t *testing.T) {
t.Parallel()
if imagetesting.SkipGoldenTests {
t.Skip("Skip golden test on this architecture")
}
files := `
-- hugo.toml --
[imaging]
quality = 90 # will only apply to jpeg in this setup.
compression = "lossless" # for webp
-- assets/sunset.jpg --
sourcefilename: ../testdata/sunset.jpg
-- layouts/home.html --
Home.
{{ $sunset := resources.Get "sunset.jpg" }}
{{ template "process" (dict "spec" "resize 300x300 webp" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 webp lossy" "img" $sunset) }}
{{ template "process" (dict "spec" "resize 300x300 jpeg" "img" $sunset) }}
` + goldenProcess
// Will be used as the base folder for generated images.
name := "losslessvsquality"
opts := imagetesting.DefaultGoldenOpts
opts.T = t
opts.Name = name
opts.Files = files
imagetesting.RunGolden(opts)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/smartcrop.go | resources/images/smartcrop.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"math"
"github.com/disintegration/gift"
"github.com/muesli/smartcrop"
)
const (
// smartCropIdentifier is the spec keyword that selects smart cropping. Do not change.
smartCropIdentifier = "smart"
// SmartCropAnchor is the anchor value representing smart crop.
SmartCropAnchor = 1000
// This is just an increment, starting on 0. If Smart Crop improves its cropping, we
// need a way to trigger a re-generation of the crops in the wild, so increment this.
smartCropVersionNumber = 0
)
// newSmartCropAnalyzer builds a smartcrop analyzer backed by this processor's
// resizer, using the given resampling filter.
func (p *ImageProcessor) newSmartCropAnalyzer(filter gift.Resampling) smartcrop.Analyzer {
	resizer := imagingResizer{p: p, filter: filter}
	return smartcrop.NewAnalyzer(resizer)
}
// Needed by smartcrop: adapts ImageProcessor to the resizer smartcrop expects.
type imagingResizer struct {
p *ImageProcessor // performs the actual resize via Filter
filter gift.Resampling // resampling filter used when resizing
}
// Resize implements the resize operation smartcrop needs. A zero width or
// height is derived from the other dimension so the aspect ratio is kept.
// See https://github.com/gohugoio/hugo/issues/7955#issuecomment-861710681
func (r imagingResizer) Resize(img image.Image, width, height uint) image.Image {
	bounds := img.Bounds()
	oldW, oldH := float64(bounds.Dx()), float64(bounds.Dy())
	scaleX, scaleY := calcFactorsNfnt(width, height, oldW, oldH)
	if width == 0 {
		width = uint(math.Ceil(oldW / scaleX))
	}
	if height == 0 {
		height = uint(math.Ceil(oldH / scaleY))
	}
	// The error is intentionally discarded; smartcrop's interface has no error return.
	resized, _ := r.p.Filter(img, gift.Resize(int(width), int(height), r.filter))
	return resized
}
// smartCrop finds the best crop rectangle of size width x height for img
// using the smartcrop analyzer. It returns the zero Rectangle for degenerate
// requested or source dimensions, and the full source bounds when the image
// already has the requested size.
func (p *ImageProcessor) smartCrop(img image.Image, width, height int, filter gift.Resampling) (image.Rectangle, error) {
	var zero image.Rectangle
	if width <= 0 || height <= 0 {
		return zero, nil
	}
	bounds := img.Bounds()
	switch {
	case bounds.Dx() <= 0 || bounds.Dy() <= 0:
		return zero, nil
	case bounds.Dx() == width && bounds.Dy() == height:
		// Nothing to crop away.
		return bounds, nil
	}
	analyzer := p.newSmartCropAnalyzer(filter)
	best, err := analyzer.FindBestCrop(img, width, height)
	if err != nil {
		return zero, err
	}
	// Clamp the suggested crop to the source bounds.
	return img.Bounds().Intersect(best), nil
}
// calcFactorsNfnt calculates scaling factors from the requested and original
// image sizes. A zero width or height means "derive from the other dimension"
// so the aspect ratio is preserved; when both are zero the scale is 1.
// Code borrowed from https://github.com/nfnt/resize/blob/83c6a9932646f83e3267f353373d47347b6036b2/resize.go#L593
func calcFactorsNfnt(width, height uint, oldWidth, oldHeight float64) (scaleX, scaleY float64) {
	switch {
	case width == 0 && height == 0:
		return 1.0, 1.0
	case width == 0:
		scaleY = oldHeight / float64(height)
		return scaleY, scaleY
	case height == 0:
		scaleX = oldWidth / float64(width)
		return scaleX, scaleX
	default:
		return oldWidth / float64(width), oldHeight / float64(height)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/opacity.go | resources/images/opacity.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/color"
"image/draw"
"github.com/disintegration/gift"
)
var _ gift.Filter = (*opacityFilter)(nil)
// opacityFilter is a gift.Filter that applies a uniform opacity to the image.
type opacityFilter struct {
opacity float32 // 0.0 is fully transparent, 1.0 is fully opaque
}
// Draw blends src into dst through a uniform alpha mask derived from
// f.opacity, where 0 is fully transparent and 1 is fully opaque.
func (f opacityFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
	// Scale the 0..1 opacity into the 0..255 alpha range.
	a := uint8(f.opacity * 255)
	uniform := image.NewUniform(color.Alpha{A: a})
	draw.DrawMask(dst, dst.Bounds(), src, image.Point{}, uniform, image.Point{}, draw.Over)
}
func (f opacityFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
return image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy())
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/mask.go | resources/images/mask.go | package images
import (
"fmt"
"image"
"image/color"
"image/draw"
"github.com/disintegration/gift"
)
// maskFilter applies a mask image to a base image.
// The mask's gray level becomes the base image's alpha (see Draw):
// white areas of the mask keep the base image, black areas become
// transparent.
type maskFilter struct {
	mask ImageSource // source of the mask image; decoded lazily in Draw
}
// Draw applies the mask to the base image.
//
// Pipeline: decode the mask, resize it to the base image's dimensions
// when necessary, convert it to grayscale, use the gray values as
// per-pixel alpha, and composite the result into dst.
//
// Note: a mask decode failure panics, because gift.Filter's Draw has no
// error return to propagate it through.
func (f maskFilter) Draw(dst draw.Image, baseImage image.Image, options *gift.Options) {
	maskImage, err := f.mask.DecodeImage()
	if err != nil {
		panic(fmt.Sprintf("failed to decode image: %s", err))
	}

	// Ensure the mask is the same size as the base image
	baseBounds := baseImage.Bounds()
	maskBounds := maskImage.Bounds()

	// Resize mask to match base image size if necessary
	if maskBounds.Dx() != baseBounds.Dx() || maskBounds.Dy() != baseBounds.Dy() {
		g := gift.New(gift.Resize(baseBounds.Dx(), baseBounds.Dy(), gift.LanczosResampling))
		resizedMask := image.NewRGBA(g.Bounds(maskImage.Bounds()))
		g.Draw(resizedMask, maskImage)
		maskImage = resizedMask
	}

	// Use gift to convert the resized mask to grayscale
	g := gift.New(gift.Grayscale())
	grayscaleMask := image.NewGray(g.Bounds(maskImage.Bounds()))
	g.Draw(grayscaleMask, maskImage)

	// Convert grayscale mask to alpha mask: each pixel's gray level
	// becomes its alpha value (white=opaque, black=transparent).
	alphaMask := image.NewAlpha(baseBounds)
	for y := baseBounds.Min.Y; y < baseBounds.Max.Y; y++ {
		for x := baseBounds.Min.X; x < baseBounds.Max.X; x++ {
			grayValue := grayscaleMask.GrayAt(x, y).Y
			alphaMask.SetAlpha(x, y, color.Alpha{A: grayValue})
		}
	}

	// Create an RGBA output image
	outputImage := image.NewRGBA(baseBounds)

	// Apply the mask using draw.DrawMask
	draw.DrawMask(outputImage, baseBounds, baseImage, image.Point{}, alphaMask, image.Point{}, draw.Over)

	// Copy the result to the destination
	gift.New().Draw(dst, outputImage)
}
// Bounds returns the bounds of the resulting image, which match the
// source dimensions anchored at the origin.
func (f maskFilter) Bounds(imgBounds image.Rectangle) image.Rectangle {
	return image.Rectangle{Max: image.Point{X: imgBounds.Dx(), Y: imgBounds.Dy()}}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/padding.go | resources/images/padding.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/color"
"image/draw"
"github.com/disintegration/gift"
)
// Compile-time check that paddingFilter implements gift.Filter.
var _ gift.Filter = (*paddingFilter)(nil)

// paddingFilter draws the source image onto a larger canvas, adding the
// given amount of padding (in pixels, possibly negative) on each side.
type paddingFilter struct {
	top, right, bottom, left int
	ccolor color.Color // canvas color
}
// Draw renders src onto a canvas enlarged by the configured padding,
// filled with the canvas color. It panics if the paddings (which may be
// negative) shrink the result below one pixel in either dimension.
func (f paddingFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
	width := src.Bounds().Dx() + f.left + f.right
	height := src.Bounds().Dy() + f.top + f.bottom
	if width < 1 {
		panic("final image width will be less than 1 pixel: check padding values")
	}
	if height < 1 {
		panic("final image height will be less than 1 pixel: check padding values")
	}

	// Fill a fresh canvas with the configured background color.
	canvas := image.NewRGBA(image.Rect(0, 0, width, height))
	draw.Draw(canvas, canvas.Bounds(), image.NewUniform(f.ccolor), image.Point{}, draw.Src)
	gift.New().Draw(dst, canvas)

	// Composite the source on top, offset by the left/top padding.
	gift.New().DrawAt(dst, src, image.Pt(f.left, f.top), gift.OverOperator)
}
// Bounds reports the padded output size: the source dimensions plus the
// left/right and top/bottom paddings.
func (f paddingFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
	w := srcBounds.Dx() + f.left + f.right
	h := srcBounds.Dy() + f.top + f.bottom
	return image.Rect(0, 0, w, h)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/config.go | resources/images/config.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"errors"
"fmt"
"image/color"
"strconv"
"strings"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/media"
"github.com/mitchellh/mapstructure"
"github.com/disintegration/gift"
)
const (
ActionResize = "resize"
ActionCrop = "crop"
ActionFit = "fit"
ActionFill = "fill"
)
var Actions = map[string]bool{
ActionResize: true,
ActionCrop: true,
ActionFit: true,
ActionFill: true,
}
var (
imageFormats = map[string]Format{
".jpg": JPEG,
".jpeg": JPEG,
".jpe": JPEG,
".jif": JPEG,
".jfif": JPEG,
".png": PNG,
".tif": TIFF,
".tiff": TIFF,
".bmp": BMP,
".gif": GIF,
".webp": WEBP,
}
// These are the image types we can process.
processableImageSubTypes = map[string]Format{
media.Builtin.JPEGType.SubType: JPEG,
media.Builtin.PNGType.SubType: PNG,
media.Builtin.TIFFType.SubType: TIFF,
media.Builtin.BMPType.SubType: BMP,
media.Builtin.GIFType.SubType: GIF,
media.Builtin.WEBPType.SubType: WEBP,
}
// Increment to mark all processed images as stale. Only use when absolutely needed.
// See the finer grained smartCropVersionNumber.
mainImageVersionNumber = 1
)
var anchorPositions = map[string]gift.Anchor{
strings.ToLower("Center"): gift.CenterAnchor,
strings.ToLower("TopLeft"): gift.TopLeftAnchor,
strings.ToLower("Top"): gift.TopAnchor,
strings.ToLower("TopRight"): gift.TopRightAnchor,
strings.ToLower("Left"): gift.LeftAnchor,
strings.ToLower("Right"): gift.RightAnchor,
strings.ToLower("BottomLeft"): gift.BottomLeftAnchor,
strings.ToLower("Bottom"): gift.BottomAnchor,
strings.ToLower("BottomRight"): gift.BottomRightAnchor,
smartCropIdentifier: SmartCropAnchor,
}
var compressionMethods = map[string]bool{
"lossy": true,
"lossless": true,
}
// These encoding hints are currently only relevant for Webp.
var hints = map[string]bool{
"picture": true,
"photo": true,
"drawing": true,
"icon": true,
"text": true,
}
var imageFilters = map[string]gift.Resampling{
strings.ToLower("NearestNeighbor"): gift.NearestNeighborResampling,
strings.ToLower("Box"): gift.BoxResampling,
strings.ToLower("Linear"): gift.LinearResampling,
strings.ToLower("Hermite"): hermiteResampling,
strings.ToLower("MitchellNetravali"): mitchellNetravaliResampling,
strings.ToLower("CatmullRom"): catmullRomResampling,
strings.ToLower("BSpline"): bSplineResampling,
strings.ToLower("Gaussian"): gaussianResampling,
strings.ToLower("Lanczos"): gift.LanczosResampling,
strings.ToLower("Hann"): hannResampling,
strings.ToLower("Hamming"): hammingResampling,
strings.ToLower("Blackman"): blackmanResampling,
strings.ToLower("Bartlett"): bartlettResampling,
strings.ToLower("Welch"): welchResampling,
strings.ToLower("Cosine"): cosineResampling,
}
// ImageFormatFromExt returns the image Format registered for the given
// file extension (including the leading dot) and whether it is known.
func ImageFormatFromExt(ext string) (Format, bool) {
	format, ok := imageFormats[ext]
	return format, ok
}
// ImageFormatFromMediaSubType returns the image Format for the given
// media sub type (e.g. "jpeg") and whether it can be processed.
func ImageFormatFromMediaSubType(sub string) (Format, bool) {
	format, ok := processableImageSubTypes[sub]
	return format, ok
}
const (
defaultJPEGQuality = 75
defaultResampleFilter = "box"
defaultBgColor = "#ffffff"
defaultHint = "photo"
defaultCompression = "lossy"
)
var (
defaultImaging = map[string]any{
"resampleFilter": defaultResampleFilter,
"bgColor": defaultBgColor,
"hint": defaultHint,
"quality": defaultJPEGQuality,
"compression": defaultCompression,
}
defaultImageConfig *config.ConfigNamespace[ImagingConfig, ImagingConfigInternal]
)
func init() {
var err error
defaultImageConfig, err = DecodeConfig(defaultImaging)
if err != nil {
panic(err)
}
}
// DecodeConfig decodes the imaging configuration from in (merging in
// the package defaults) and returns it as a config namespace.
func DecodeConfig(in map[string]any) (*config.ConfigNamespace[ImagingConfig, ImagingConfigInternal], error) {
	if in == nil {
		in = make(map[string]any)
	}

	buildConfig := func(in any) (ImagingConfigInternal, any, error) {
		m, err := maps.ToStringMapE(in)
		if err != nil {
			return ImagingConfigInternal{}, nil, err
		}
		// Merge in the defaults.
		maps.MergeShallow(m, defaultImaging)

		var i ImagingConfigInternal
		if err := mapstructure.Decode(m, &i.Imaging); err != nil {
			return i, nil, err
		}

		if err := i.Imaging.init(); err != nil {
			return i, nil, err
		}

		i.BgColor, err = hexStringToColorGo(i.Imaging.BgColor)
		if err != nil {
			return i, nil, err
		}

		if i.Imaging.Anchor != "" {
			anchor, found := anchorPositions[i.Imaging.Anchor]
			if !found {
				// Report the configured string, not the (zero) gift.Anchor int.
				return i, nil, fmt.Errorf("invalid anchor value %q in imaging config", i.Imaging.Anchor)
			}
			i.Anchor = anchor
		}

		filter, found := imageFilters[i.Imaging.ResampleFilter]
		if !found {
			// Report the configured name; filter is nil when the lookup fails.
			return i, nil, fmt.Errorf("%q is not a valid resample filter", i.Imaging.ResampleFilter)
		}
		i.ResampleFilter = filter

		return i, nil, nil
	}

	ns, err := config.DecodeNamespace[ImagingConfig](in, buildConfig)
	if err != nil {
		// The previous message said "media types", copied from another decoder.
		return nil, fmt.Errorf("failed to decode imaging config: %w", err)
	}

	return ns, nil
}
// DecodeImageConfig parses the given option strings (e.g. "resize",
// "600x400", "q50", "r90", "#fff", "lanczos", "webp") into an
// ImageConfig, filling in defaults from the site imaging config.
// sourceFormat is used as the target format when none is specified.
// The normalized options also determine the cache Key.
func DecodeImageConfig(options []string, defaults *config.ConfigNamespace[ImagingConfig, ImagingConfigInternal], sourceFormat Format) (ImageConfig, error) {
	var (
		c   ImageConfig = GetDefaultImageConfig(defaults)
		err error
	)

	// Make to lower case, trim space and remove any empty strings.
	n := 0
	for _, s := range options {
		s = strings.TrimSpace(s)
		if s != "" {
			options[n] = strings.ToLower(s)
			n++
		}
	}
	options = options[:n]

	// Classify each option by matching it against the known keyword
	// tables first, then by its leading character/shape.
	for _, part := range options {
		if _, ok := Actions[part]; ok {
			c.Action = part
		} else if pos, ok := anchorPositions[part]; ok {
			c.Anchor = pos
		} else if filter, ok := imageFilters[part]; ok {
			c.Filter = filter
		} else if _, ok := hints[part]; ok {
			c.Hint = part
		} else if _, ok := compressionMethods[part]; ok {
			c.Compression = part
		} else if part[0] == '#' {
			// Background color, e.g. "#fff".
			c.BgColor, err = hexStringToColorGo(part[1:])
			if err != nil {
				return c, err
			}
		} else if part[0] == 'q' {
			// Quality, e.g. "q50".
			c.Quality, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
			if c.Quality < 1 || c.Quality > 100 {
				return c, errors.New("quality ranges from 1 to 100 inclusive")
			}
		} else if part[0] == 'r' {
			// Rotation in degrees counter-clockwise, e.g. "r90".
			c.Rotate, err = strconv.Atoi(part[1:])
			if err != nil {
				return c, err
			}
		} else if strings.Contains(part, "x") {
			// Dimensions, e.g. "600x400", "600x" or "x400".
			widthHeight := strings.Split(part, "x")
			if len(widthHeight) <= 2 {
				first := widthHeight[0]
				if first != "" {
					c.Width, err = strconv.Atoi(first)
					if err != nil {
						return c, err
					}
				}

				if len(widthHeight) == 2 {
					second := widthHeight[1]
					if second != "" {
						c.Height, err = strconv.Atoi(second)
						if err != nil {
							return c, err
						}
					}
				}
			} else {
				return c, errors.New("invalid image dimensions")
			}
		} else if f, ok := ImageFormatFromExt("." + part); ok {
			// Target format, e.g. "webp".
			c.TargetFormat = f
		}
	}

	// Validate the dimension requirements of the chosen action.
	switch c.Action {
	case ActionCrop, ActionFill, ActionFit:
		if c.Width == 0 || c.Height == 0 {
			return c, errors.New("must provide Width and Height")
		}
	case ActionResize:
		if c.Width == 0 && c.Height == 0 {
			return c, errors.New("must provide Width or Height")
		}
	default:
		if c.Width != 0 || c.Height != 0 {
			return c, errors.New("width or height are not supported for this action")
		}
	}

	if c.Action != "" && c.Filter == nil {
		c.Filter = defaults.Config.ResampleFilter
	}

	if c.Hint == "" {
		// NOTE(review): hardcodes "photo" instead of reading
		// defaults.Config.Imaging.Hint — confirm this is intentional.
		c.Hint = "photo"
	}

	if c.Action != "" && c.Anchor == -1 {
		// -1 is the "unset" marker from GetDefaultImageConfig.
		c.Anchor = defaults.Config.Anchor
	}

	// default to the source format
	if c.TargetFormat == 0 {
		c.TargetFormat = sourceFormat
	}

	if c.Quality <= 0 && c.TargetFormat.RequiresDefaultQuality() {
		// We need a quality setting for all JPEGs and WEBPs,
		// unless the user explicitly set quality.
		c.Quality = defaults.Config.Imaging.Quality
	}

	if c.Compression == "" {
		c.Compression = defaults.Config.Imaging.Compression
	}

	if c.BgColor == nil && c.TargetFormat != sourceFormat {
		if sourceFormat.SupportsTransparency() && !c.TargetFormat.SupportsTransparency() {
			c.BgColor = defaults.Config.BgColor
		}
	}

	// Version numbers are mixed into the cache key so that bumping them
	// invalidates previously processed images.
	if mainImageVersionNumber > 0 {
		options = append(options, strconv.Itoa(mainImageVersionNumber))
	}
	if smartCropVersionNumber > 0 && c.Anchor == SmartCropAnchor {
		options = append(options, strconv.Itoa(smartCropVersionNumber))
	}

	c.Key = hashing.HashStringHex(options)

	return c, nil
}
// ImageConfig holds configuration to create a new image from an existing one, resize etc.
type ImageConfig struct {
// This defines the output format of the output image. It defaults to the source format.
TargetFormat Format
Action string
// If set, this will be used as the key in filenames etc.
Key string
// Quality ranges from 0 to 100 inclusive, higher is better.
// This is only relevant for JPEG and WEBP images.
// For WebP, 0 means lossless.
// Default is 75.
Quality int
// Rotate rotates an image by the given angle counter-clockwise.
// The rotation will be performed first.
Rotate int
// Used to fill any transparency.
// When set in site config, it's used when converting to a format that does
// not support transparency.
// When set per image operation, it's used even for formats that does support
// transparency.
BgColor color.Color
// Hint about what type of picture this is. Used to optimize encoding
// when target is set to webp.
Hint string
Compression string
Width int
Height int
Filter gift.Resampling
Anchor gift.Anchor
}
// Reanchor returns a copy of cfg with the given anchor applied and the
// cache Key re-derived so the re-anchored result is cached separately.
func (cfg ImageConfig) Reanchor(a gift.Anchor) ImageConfig {
	cfg.Key = hashing.HashStringHex(cfg.Key, "reanchor", a)
	cfg.Anchor = a
	return cfg
}
type ImagingConfigInternal struct {
BgColor color.Color
ResampleFilter gift.Resampling
Anchor gift.Anchor
Imaging ImagingConfig
}
// Compile resolves the external (string-based) configuration into the
// internal representation: parsed background color, anchor position and
// resample filter.
func (i *ImagingConfigInternal) Compile(externalCfg *ImagingConfig) error {
	var err error
	i.BgColor, err = hexStringToColorGo(externalCfg.BgColor)
	if err != nil {
		return err
	}

	if externalCfg.Anchor != "" {
		anchor, found := anchorPositions[externalCfg.Anchor]
		if !found {
			// Report the configured string, not the (zero) gift.Anchor int.
			return fmt.Errorf("invalid anchor value %q in imaging config", externalCfg.Anchor)
		}
		i.Anchor = anchor
	}

	filter, found := imageFilters[externalCfg.ResampleFilter]
	if !found {
		// Report the configured name; filter is nil when the lookup fails.
		return fmt.Errorf("%q is not a valid resample filter", externalCfg.ResampleFilter)
	}
	i.ResampleFilter = filter

	return nil
}
// ImagingConfig contains default image processing configuration. This will be fetched
// from site (or language) config.
type ImagingConfig struct {
// Default image quality setting (1-100). Only used for JPEG and WebP images.
Quality int
// Compression method to use.
// One of "lossy" or "lossless".
// Note that lossless is currently only supported for WebP.
Compression string
// Resample filter to use in resize operations.
ResampleFilter string
// Hint about what type of image this is.
// Currently only used when encoding to Webp.
// Default is "photo".
// Valid values are "picture", "photo", "drawing", "icon", or "text".
Hint string
// The anchor to use in Fill. Default is "smart", i.e. Smart Crop.
Anchor string
// Default color used in fill operations (e.g. "fff" for white).
BgColor string
Exif ExifConfig
}
// init validates the quality setting and normalizes all string options
// to lower case, defaulting the anchor to Smart Crop and installing the
// default Exif field filter when none is configured.
func (cfg *ImagingConfig) init() error {
	if cfg.Quality < 1 || cfg.Quality > 100 {
		return errors.New("image quality must be a number between 1 and 100")
	}

	cfg.BgColor = strings.ToLower(strings.TrimPrefix(cfg.BgColor, "#"))
	for _, s := range []*string{&cfg.Anchor, &cfg.ResampleFilter, &cfg.Hint, &cfg.Compression} {
		*s = strings.ToLower(*s)
	}

	if cfg.Anchor == "" {
		cfg.Anchor = smartCropIdentifier
	}

	if strings.TrimSpace(cfg.Exif.IncludeFields) == "" && strings.TrimSpace(cfg.Exif.ExcludeFields) == "" {
		// Don't change this for no good reason. Please don't.
		cfg.Exif.ExcludeFields = "GPS|Exif|Exposure[M|P|B]|Contrast|Resolution|Sharp|JPEG|Metering|Sensing|Saturation|ColorSpace|Flash|WhiteBalance"
	}

	return nil
}
type ExifConfig struct {
// Regexp matching the Exif fields you want from the (massive) set of Exif info
// available. As we cache this info to disk, this is for performance and
// disk space reasons more than anything.
// If you want it all, put ".*" in this config setting.
// Note that if neither this or ExcludeFields is set, Hugo will return a small
// default set.
IncludeFields string
// Regexp matching the Exif fields you want to exclude. This may be easier to use
// than IncludeFields above, depending on what you want.
ExcludeFields string
// Hugo extracts the "photo taken" date/time into .Date by default.
// Set this to true to turn it off.
DisableDate bool
// Hugo extracts the "photo taken where" (GPS latitude and longitude) into
// .Long and .Lat. Set this to true to turn it off.
DisableLatLong bool
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/dither.go | resources/images/dither.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/draw"
"github.com/disintegration/gift"
"github.com/makeworld-the-better-one/dither/v2"
)
var _ gift.Filter = (*ditherFilter)(nil)
type ditherFilter struct {
ditherer *dither.Ditherer
}
var ditherMethodsErrorDiffusion = map[string]dither.ErrorDiffusionMatrix{
"atkinson": dither.Atkinson,
"burkes": dither.Burkes,
"falsefloydsteinberg": dither.FalseFloydSteinberg,
"floydsteinberg": dither.FloydSteinberg,
"jarvisjudiceninke": dither.JarvisJudiceNinke,
"sierra": dither.Sierra,
"sierra2": dither.Sierra2,
"sierra2_4a": dither.Sierra2_4A,
"sierra3": dither.Sierra3,
"sierralite": dither.SierraLite,
"simple2d": dither.Simple2D,
"stevenpigeon": dither.StevenPigeon,
"stucki": dither.Stucki,
"tworowsierra": dither.TwoRowSierra,
}
var ditherMethodsOrdered = map[string]dither.OrderedDitherMatrix{
"clustereddot4x4": dither.ClusteredDot4x4,
"clustereddot6x6": dither.ClusteredDot6x6,
"clustereddot6x6_2": dither.ClusteredDot6x6_2,
"clustereddot6x6_3": dither.ClusteredDot6x6_3,
"clustereddot8x8": dither.ClusteredDot8x8,
"clustereddotdiagonal16x16": dither.ClusteredDotDiagonal16x16,
"clustereddotdiagonal6x6": dither.ClusteredDotDiagonal6x6,
"clustereddotdiagonal8x8": dither.ClusteredDotDiagonal8x8,
"clustereddotdiagonal8x8_2": dither.ClusteredDotDiagonal8x8_2,
"clustereddotdiagonal8x8_3": dither.ClusteredDotDiagonal8x8_3,
"clustereddothorizontalline": dither.ClusteredDotHorizontalLine,
"clustereddotspiral5x5": dither.ClusteredDotSpiral5x5,
"clustereddotverticalline": dither.ClusteredDotVerticalLine,
"horizontal3x5": dither.Horizontal3x5,
"vertical5x3": dither.Vertical5x3,
}
func (f ditherFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
gift.New().Draw(dst, f.ditherer.Dither(src))
}
func (f ditherFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
return image.Rect(0, 0, srcBounds.Dx(), srcBounds.Dy())
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/image.go | resources/images/image.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"errors"
"fmt"
"image"
"image/color"
"image/draw"
"io"
"sync"
"github.com/bep/imagemeta"
"github.com/bep/logg"
"github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/internal/warpc"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/resources/images/exif"
"github.com/disintegration/gift"
"github.com/gohugoio/hugo/common/himage"
"github.com/gohugoio/hugo/common/hugio"
)
// NewImage creates a new Image for the given format, processor and
// source spec. When a decoded image is supplied, its dimensions are
// recorded up front so the source never needs to be re-read for the
// config.
func NewImage(f Format, proc *ImageProcessor, img image.Image, s Spec) *Image {
	c := &imageConfig{}
	if img != nil {
		c.config = imageConfigFromImage(img)
		c.configLoaded = true
	}
	return &Image{Format: f, Proc: proc, Spec: s, imageConfig: c}
}
type Image struct {
Format Format
Proc *ImageProcessor
Spec Spec
*imageConfig
}
// EncodeTo encodes img to w using the codec settings in conf.
func (i *Image) EncodeTo(conf ImageConfig, img image.Image, w io.Writer) error {
	return i.Proc.Codec.EncodeTo(conf, w, img)
}
// Height returns i's height.
// Note: any error from loading the image config is swallowed here; the
// zero value is returned when the config could not be read.
func (i *Image) Height() int {
	i.initConfig()
	return i.config.Height
}

// Width returns i's width.
// Note: any error from loading the image config is swallowed here; the
// zero value is returned when the config could not be read.
func (i *Image) Width() int {
	i.initConfig()
	return i.config.Width
}
// WithImage returns a copy of i (value receiver) whose config is
// derived from the given decoded image. The Spec is dropped since the
// dimensions are then known directly.
func (i Image) WithImage(img image.Image) *Image {
	i.Spec = nil
	i.imageConfig = &imageConfig{
		config: imageConfigFromImage(img),
		configLoaded: true,
	}

	return &i
}

// WithSpec returns a copy of i (value receiver) with the given source
// Spec and a fresh, not-yet-loaded config.
func (i Image) WithSpec(s Spec) *Image {
	i.Spec = s
	i.imageConfig = &imageConfig{}

	return &i
}
// InitConfig reads the image config from the given reader.
// The config is decoded at most once (sync.Once); note that err is only
// assigned when the Once body actually runs, so a repeated call after a
// failure reports nil.
func (i *Image) InitConfig(r io.Reader) error {
	var err error
	i.configInit.Do(func() {
		i.config, _, err = i.Proc.Codec.DecodeConfig(r)
	})
	return err
}
// initConfig lazily loads the image config from the Spec's source,
// guarded by sync.Once. When the config was supplied up front
// (configLoaded), the source is never opened.
// Note: err is captured from the Once body, so only the call that runs
// the body can observe a load failure; later calls return nil.
func (i *Image) initConfig() error {
	var err error
	i.configInit.Do(func() {
		if i.configLoaded {
			return
		}

		var f hugio.ReadSeekCloser

		f, err = i.Spec.ReadSeekCloser()
		if err != nil {
			return
		}
		defer f.Close()

		i.config, _, err = i.Proc.Codec.DecodeConfig(f)
	})

	if err != nil {
		return fmt.Errorf("failed to load image config: %w", err)
	}

	return nil
}
// NewImageProcessor creates an ImageProcessor from the given imaging
// config, wiring up the Exif decoder (honouring the configured
// include/exclude filters) and the Wasm-dispatched WebP codec.
// It fails if the WebP codec cannot be created or is unavailable.
func NewImageProcessor(warnl logg.LevelLogger, wasmDispatchers *warpc.Dispatchers, cfg *config.ConfigNamespace[ImagingConfig, ImagingConfigInternal]) (*ImageProcessor, error) {
	e := cfg.Config.Imaging.Exif
	exifDecoder, err := exif.NewDecoder(
		exif.WithDateDisabled(e.DisableDate),
		exif.WithLatLongDisabled(e.DisableLatLong),
		exif.ExcludeFields(e.ExcludeFields),
		exif.IncludeFields(e.IncludeFields),
		exif.WithWarnLogger(warnl),
	)
	if err != nil {
		return nil, err
	}

	webpCodec, err := wasmDispatchers.NewWepCodec()
	if err != nil {
		return nil, err
	}
	if webpCodec == nil {
		return nil, errors.New("webp codec is not available")
	}
	imageCodec := newCodec(webpCodec)

	return &ImageProcessor{
		Cfg: cfg,
		exifDecoder: exifDecoder,
		Codec: imageCodec,
	}, nil
}
type ImageProcessor struct {
Cfg *config.ConfigNamespace[ImagingConfig, ImagingConfigInternal]
exifDecoder *exif.Decoder
Codec *Codec
}
// DecodeExif decodes the Exif metadata from r.
// Filename is only used for logging.
func (p *ImageProcessor) DecodeExif(filename string, format imagemeta.ImageFormat, r io.Reader) (*exif.ExifInfo, error) {
	return p.exifDecoder.Decode(filename, format, r)
}
// FiltersFromConfig builds the gift filter chain for the given image
// configuration: an optional rotation first, then the filters implied
// by the configured action (resize/crop/fill/fit). Smart-crop anchors
// run the smart-crop detector on src to find the crop bounds.
//
// Uses the package Action* constants instead of repeating the string
// literals, keeping this switch in sync with the Actions table.
func (p *ImageProcessor) FiltersFromConfig(src image.Image, conf ImageConfig) ([]gift.Filter, error) {
	var filters []gift.Filter

	if conf.Rotate != 0 {
		// Apply any rotation before any resize.
		filters = append(filters, gift.Rotate(float32(conf.Rotate), color.Transparent, gift.NearestNeighborInterpolation))
	}

	switch conf.Action {
	case ActionResize:
		filters = append(filters, gift.Resize(conf.Width, conf.Height, conf.Filter))
	case ActionCrop:
		if conf.Anchor == SmartCropAnchor {
			bounds, err := p.smartCrop(src, conf.Width, conf.Height, conf.Filter)
			if err != nil {
				return nil, err
			}
			// First crop using the bounds returned by smartCrop.
			filters = append(filters, gift.Crop(bounds))
			// Then center crop the image to get an image the desired size without resizing.
			filters = append(filters, gift.CropToSize(conf.Width, conf.Height, gift.CenterAnchor))
		} else {
			filters = append(filters, gift.CropToSize(conf.Width, conf.Height, conf.Anchor))
		}
	case ActionFill:
		if conf.Anchor == SmartCropAnchor {
			bounds, err := p.smartCrop(src, conf.Width, conf.Height, conf.Filter)
			if err != nil {
				return nil, err
			}
			// First crop it, then resize it.
			filters = append(filters, gift.Crop(bounds))
			filters = append(filters, gift.Resize(conf.Width, conf.Height, conf.Filter))
		} else {
			filters = append(filters, gift.ResizeToFill(conf.Width, conf.Height, conf.Filter, conf.Anchor))
		}
	case ActionFit:
		filters = append(filters, gift.ResizeToFit(conf.Width, conf.Height, conf.Filter))
	default:
		// No action configured: only the optional rotation applies.
	}
	return filters, nil
}
// ApplyFiltersFromConfig builds the filter chain implied by conf and
// applies it to src. When conf implies no filters, src is returned
// (normalized for the target format via resolveSrc).
func (p *ImageProcessor) ApplyFiltersFromConfig(src image.Image, conf ImageConfig) (image.Image, error) {
	filters, err := p.FiltersFromConfig(src, conf)
	if err != nil {
		return nil, err
	}

	if len(filters) == 0 {
		return p.resolveSrc(src, conf.TargetFormat), nil
	}

	return p.doFilter(src, conf.TargetFormat, filters...)
}
// Filter applies the given filters to src in order.
// The target format is passed as 0 (unknown), so animated sources are
// reduced to their first frame (see doFilter).
func (p *ImageProcessor) Filter(src image.Image, filters ...gift.Filter) (image.Image, error) {
	return p.doFilter(src, 0, filters...)
}
// resolveSrc normalizes an animated source for the target format: when
// the target cannot represent animation (or there is at most one
// frame), only the first frame is kept. Non-animated sources are
// returned unchanged.
func (p *ImageProcessor) resolveSrc(src image.Image, targetFormat Format) image.Image {
	anim, ok := src.(himage.AnimatedImage)
	if !ok {
		return src
	}
	frames := anim.GetFrames()
	if len(frames) >= 2 && targetFormat.SupportsAnimation() {
		return src
	}
	// E.g. converting an animated GIF to JPEG: keep only the first frame.
	return frames[0]
}
// doFilter applies the filter chain to src.
//
// Animated sources with 2+ frames are filtered frame by frame when the
// target format supports animation; otherwise (including targetFormat
// 0, "unknown") only the first frame is used. Each frame is first
// composited onto an accumulator at its own offset so partial-frame
// (delta) animations render correctly before filtering.
func (p *ImageProcessor) doFilter(src image.Image, targetFormat Format, filters ...gift.Filter) (image.Image, error) {
	filter := gift.New(filters...)

	if anim, ok := src.(himage.AnimatedImage); ok {
		frames := anim.GetFrames()
		if len(frames) < 2 || !targetFormat.SupportsAnimation() {
			src = frames[0]
		} else {
			var bounds image.Rectangle
			firstFrame := frames[0]
			tmp := image.NewNRGBA(firstFrame.Bounds())
			for i, frame := range frames {
				// Composite this frame onto the accumulator at its offset.
				gift.New().DrawAt(tmp, frame, frame.Bounds().Min, gift.OverOperator)
				bounds = filter.Bounds(tmp.Bounds())
				var dst draw.Image
				if paletted, ok := frame.(*image.Paletted); ok {
					// Gif.
					dst = image.NewPaletted(bounds, paletted.Palette)
				} else {
					dst = image.NewNRGBA(bounds)
				}
				filter.Draw(dst, tmp)
				frames[i] = dst
			}
			anim.SetWidthHeight(bounds.Dx(), bounds.Dy())
			anim.SetFrames(frames)
			return anim, nil
		}
	}

	bounds := filter.Bounds(src.Bounds())

	// Preserve the source's pixel format where possible.
	var dst draw.Image
	switch src.(type) {
	case *image.RGBA:
		dst = image.NewRGBA(bounds)
	case *image.NRGBA:
		dst = image.NewNRGBA(bounds)
	case *image.Gray:
		dst = image.NewGray(bounds)
	default:
		dst = image.NewNRGBA(bounds)
	}

	filter.Draw(dst, src)

	return dst, nil
}
// GetDefaultImageConfig returns an ImageConfig pre-populated from the
// given imaging defaults, falling back to the package defaults when
// defaults is nil. Anchor is set to -1 as an "unset" marker.
// NOTE(review): Hint is hardcoded to "photo" rather than taken from
// defaults.Config.Imaging.Hint — confirm this is intentional.
func GetDefaultImageConfig(defaults *config.ConfigNamespace[ImagingConfig, ImagingConfigInternal]) ImageConfig {
	if defaults == nil {
		defaults = defaultImageConfig
	}
	return ImageConfig{
		Anchor: -1, // The real values start at 0.
		Hint: "photo",
		Quality: defaults.Config.Imaging.Quality,
		Compression: defaults.Config.Imaging.Compression,
	}
}
type Spec interface {
// Loads the image source.
ReadSeekCloser() (hugio.ReadSeekCloser, error)
}
// Format is an image file format.
type Format int
const (
JPEG Format = iota + 1
PNG
GIF
TIFF
BMP
WEBP
)
// ToImageMetaImageFormatFormat maps f to the corresponding imagemeta
// format, or -1 for formats with no mapping here (GIF, BMP).
func (f Format) ToImageMetaImageFormatFormat() imagemeta.ImageFormat {
	switch f {
	case JPEG:
		return imagemeta.JPEG
	case PNG:
		return imagemeta.PNG
	case TIFF:
		return imagemeta.TIFF
	case WEBP:
		return imagemeta.WebP
	default:
		return -1
	}
}
// RequiresDefaultQuality returns if the default quality needs to be applied to
// images of this format. Only the JPEG and WebP encodings take a
// quality setting here.
func (f Format) RequiresDefaultQuality() bool {
	return f == JPEG || f == WEBP
}

// SupportsTransparency reports whether it supports transparency in any form.
// JPEG is the only format handled here without it.
func (f Format) SupportsTransparency() bool {
	return f != JPEG
}

// SupportsAnimation reports whether the format supports animation.
func (f Format) SupportsAnimation() bool {
	return f == GIF || f == WEBP
}

// DefaultExtension returns the default file extension of this format, starting with a dot.
// For example: .jpg for JPEG
func (f Format) DefaultExtension() string {
	return f.MediaType().FirstSuffix.FullSuffix
}
// MediaType returns the media type of this image, e.g. image/jpeg for JPEG.
// It panics on values outside the defined Format constants.
func (f Format) MediaType() media.Type {
	switch f {
	case JPEG:
		return media.Builtin.JPEGType
	case PNG:
		return media.Builtin.PNGType
	case GIF:
		return media.Builtin.GIFType
	case TIFF:
		return media.Builtin.TIFFType
	case BMP:
		return media.Builtin.BMPType
	case WEBP:
		return media.Builtin.WEBPType
	default:
		panic(fmt.Sprintf("%d is not a valid image format", f))
	}
}
// String returns the canonical upper-case name of the format, or
// "Unknown" for unrecognized values.
func (f Format) String() string {
	names := [...]string{JPEG: "JPEG", PNG: "PNG", GIF: "GIF", TIFF: "TIFF", BMP: "BMP", WEBP: "WEBP"}
	if f < JPEG || int(f) >= len(names) {
		return "Unknown"
	}
	return names[f]
}
type imageConfig struct {
config image.Config
configInit sync.Once
configLoaded bool
}
// imageConfigFromImage derives an image.Config (width/height) from img.
// If img carries its own config (himage.ImageConfigProvider), that is
// returned directly; otherwise the dimensions come from its bounds.
func imageConfigFromImage(img image.Image) image.Config {
	if cp, ok := img.(himage.ImageConfigProvider); ok {
		return cp.GetImageConfig()
	}
	b := img.Bounds()
	// Use Dx/Dy rather than Max so that images whose bounds do not start
	// at the origin still report the correct dimensions.
	return image.Config{Width: b.Dx(), Height: b.Dy()}
}
// UnwrapFilter unwraps the given filter if it is a filter wrapper.
// Otherwise in is returned unchanged.
func UnwrapFilter(in gift.Filter) gift.Filter {
	if f, ok := in.(filter); ok {
		return f.Filter
	}
	return in
}
// ToFilters converts the given input to a slice of gift.Filter.
// Accepted inputs: a []gift.Filter (returned as-is), a []filter
// (re-boxed element by element) or a single gift.Filter. Anything else
// panics.
func ToFilters(in any) []gift.Filter {
	switch v := in.(type) {
	case []gift.Filter:
		return v
	case []filter:
		vv := make([]gift.Filter, len(v))
		for i, f := range v {
			vv[i] = f
		}
		return vv
	case gift.Filter:
		return []gift.Filter{v}
	default:
		panic(fmt.Sprintf("%T is not an image filter", in))
	}
}
// IsOpaque returns false if the image has alpha channel and there is at least 1
// pixel that is not (fully) opaque.
func IsOpaque(img image.Image) bool {
if oim, ok := img.(interface {
Opaque() bool
}); ok {
return oim.Opaque()
}
return false
}
// ImageSource identifies and decodes an image.
type ImageSource interface {
DecodeImage() (image.Image, error)
Key() string
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/color_test.go | resources/images/color_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image/color"
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/htesting/hqt"
)
func TestHexStringToColor(t *testing.T) {
c := qt.New(t)
for _, test := range []struct {
arg string
expect any
}{
{"f", false},
{"#f", false},
{"#fffffff", false},
{"fffffff", false},
{"#fff", color.White},
{"fff", color.White},
{"FFF", color.White},
{"FfF", color.White},
{"#ffffff", color.White},
{"ffffff", color.White},
{"#000", color.Black},
{"#4287f5", color.RGBA{R: 0x42, G: 0x87, B: 0xf5, A: 0xff}},
{"777", color.RGBA{R: 0x77, G: 0x77, B: 0x77, A: 0xff}},
} {
c.Run(test.arg, func(c *qt.C) {
c.Parallel()
result, err := hexStringToColorGo(test.arg)
if b, ok := test.expect.(bool); ok && !b {
c.Assert(err, qt.Not(qt.IsNil))
return
}
c.Assert(err, qt.IsNil)
c.Assert(result, qt.DeepEquals, test.expect)
})
}
}
// TestColorToHexString verifies ColorGoToHexString, including that the
// alpha component is only emitted when the color is not fully opaque.
func TestColorToHexString(t *testing.T) {
	c := qt.New(t)
	for _, test := range []struct {
		arg color.Color
		expect string
	}{
		{color.White, "#ffffff"},
		{color.Black, "#000000"},
		{color.RGBA{R: 0x42, G: 0x87, B: 0xf5, A: 0xff}, "#4287f5"},
		// 50% opacity.
		// Note that the .Colors (dominant colors) received from the Image resource
		// will always have an alpha value of 0xff.
		{color.RGBA{R: 0x42, G: 0x87, B: 0xf5, A: 0x80}, "#4287f580"},
	} {
		c.Run(test.expect, func(c *qt.C) {
			c.Parallel()
			result := ColorGoToHexString(test.arg)
			c.Assert(result, qt.Equals, test.expect)
		})
	}
}
// TestAddColorToPalette verifies that AddColorToPalette deduplicates by
// value equality: re-adding an existing color (or an equal parsed color)
// keeps the palette length unchanged, while a new color grows it by one.
func TestAddColorToPalette(t *testing.T) {
	c := qt.New(t)
	palette := color.Palette{color.White, color.Black}
	c.Assert(AddColorToPalette(color.White, palette), qt.HasLen, 2)
	blue1, _ := hexStringToColorGo("34c3eb")
	blue2, _ := hexStringToColorGo("34c3eb")
	white, _ := hexStringToColorGo("fff")
	// "fff" parses to color.White, so it is detected as a duplicate.
	c.Assert(AddColorToPalette(white, palette), qt.HasLen, 2)
	c.Assert(AddColorToPalette(blue1, palette), qt.HasLen, 3)
	c.Assert(AddColorToPalette(blue2, palette), qt.HasLen, 3)
}
// TestReplaceColorInPalette verifies that the nearest palette entry
// (here: white) is replaced in place by the given color.
func TestReplaceColorInPalette(t *testing.T) {
	c := qt.New(t)
	palette := color.Palette{color.White, color.Black}
	offWhite, _ := hexStringToColorGo("fcfcfc")
	ReplaceColorInPalette(offWhite, palette)
	c.Assert(palette, qt.HasLen, 2)
	c.Assert(palette[0], qt.Equals, offWhite)
}
// TestColorLuminance pins the WCAG relative luminance values for black,
// white and two mid-range colors.
func TestColorLuminance(t *testing.T) {
	c := qt.New(t)
	c.Assert(hexStringToColor("#000000").Luminance(), hqt.IsSameFloat64, 0.0)
	c.Assert(hexStringToColor("#768a9a").Luminance(), hqt.IsSameFloat64, 0.24361603589088263)
	c.Assert(hexStringToColor("#d5bc9f").Luminance(), hqt.IsSameFloat64, 0.5261577672685374)
	c.Assert(hexStringToColor("#ffffff").Luminance(), hqt.IsSameFloat64, 1.0)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/image_resource.go | resources/images/image_resource.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"github.com/gohugoio/hugo/resources/images/exif"
"github.com/gohugoio/hugo/resources/resource"
)
// ImageResource represents an image resource.
// It combines the generic resource behavior with image-specific operations.
type ImageResource interface {
	resource.Resource
	ImageResourceOps
}
// ImageResourceOps holds the image-specific operations of an ImageResource.
type ImageResourceOps interface {
	// Height returns the height of the Image.
	Height() int
	// Width returns the width of the Image.
	Width() int
	// Process applies the given image processing options to the image.
	Process(spec string) (ImageResource, error)
	// Crop an image to match the given dimensions without resizing.
	// You must provide both width and height.
	// Use the anchor option to change the crop box anchor point.
	// {{ $image := $image.Crop "600x400" }}
	Crop(spec string) (ImageResource, error)
	// Fill scales the image to the smallest possible size that will cover the specified dimensions in spec,
	// crops the resized image to the specified dimensions using the given anchor point.
	// The spec is space delimited, e.g. `200x300 TopLeft`.
	Fill(spec string) (ImageResource, error)
	// Fit scales down the image using the given spec.
	Fit(spec string) (ImageResource, error)
	// Resize resizes the image to the given spec. If one of width or height is 0, the image aspect
	// ratio is preserved.
	Resize(spec string) (ImageResource, error)
	// Filter applies one or more filters to an Image.
	// {{ $image := $image.Filter (images.GaussianBlur 6) (images.Pixelate 8) }}
	Filter(filters ...any) (ImageResource, error)
	// Exif returns an ExifInfo object containing Image metadata.
	Exif() *exif.ExifInfo
	// Colors returns a slice of the most dominant colors in an image
	// using a simple histogram method.
	Colors() ([]Color, error)
	// For internal use.
	DecodeImage() (image.Image, error)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/process.go | resources/images/process.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/draw"
"github.com/disintegration/gift"
)
// Compile-time check that processFilter implements ImageProcessSpecProvider.
var _ ImageProcessSpecProvider = (*processFilter)(nil)

// ImageProcessSpecProvider is implemented by filters that carry a raw
// image-processing spec string.
type ImageProcessSpecProvider interface {
	ImageProcessSpec() string
}
// processFilter is a pseudo gift.Filter that only transports a processing
// spec string; its drawing methods deliberately panic (see Draw/Bounds).
type processFilter struct {
	spec string
}
// Draw implements gift.Filter. processFilter is a marker filter only,
// so drawing is not supported and panics when invoked.
func (f processFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
	panic("not supported")
}
// Bounds implements gift.Filter. Like Draw, it is not supported for this
// marker filter and panics when invoked.
func (f processFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
	panic("not supported")
}
// ImageProcessSpec returns the raw processing spec string carried by this filter.
func (f processFilter) ImageProcessSpec() string {
	return f.spec
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/color.go | resources/images/color.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"encoding/hex"
"fmt"
"hash/fnv"
"image/color"
"math"
"strings"
"github.com/gohugoio/hugo/common/hstrings"
"slices"
)
// colorGoProvider is implemented by values that can expose their
// underlying color.Color (e.g. Color via its ColorGo method).
type colorGoProvider interface {
	ColorGo() color.Color
}
// Color wraps a color.Color together with its precomputed hex
// representation and relative luminance (see init).
type Color struct {
	// The color.
	color color.Color
	// The color prefixed with a #.
	hex string
	// The relative luminance of the color.
	luminance float64
}
// Luminance as defined by w3.org.
// See https://www.w3.org/TR/WCAG21/#dfn-relative-luminance
// The value is precomputed in init and lies in [0, 1].
func (c Color) Luminance() float64 {
	return c.luminance
}
// ColorGo returns the color as a color.Color.
// This satisfies the colorGoProvider interface.
// For internal use only.
func (c Color) ColorGo() color.Color {
	return c.color
}
// ColorHex returns the color as a hex string prefixed with a #.
// The value is precomputed in init.
func (c Color) ColorHex() string {
	return c.hex
}
// String returns the color as a hex string prefixed with a #,
// making Color satisfy fmt.Stringer.
func (c Color) String() string {
	return c.hex
}
// For hashstructure. This struct is used in template func options
// that needs to be able to hash a Color.
// For internal use only.
// The hash is the 64-bit FNV-1a of the hex string; the error is always nil.
func (c Color) Hash() (uint64, error) {
	h := fnv.New64a()
	h.Write([]byte(c.hex))
	return h.Sum64(), nil
}
// init precomputes the derived hex string and WCAG relative luminance
// from c.color. The returned error is currently always nil.
func (c *Color) init() error {
	c.hex = ColorGoToHexString(c.color)
	r, g, b, _ := c.color.RGBA()
	// RGBA returns 16-bit channels; the uint8 conversion keeps the low byte,
	// which equals the high byte for colors derived from 8-bit values.
	c.luminance = 0.2126*c.toSRGB(uint8(r)) + 0.7152*c.toSRGB(uint8(g)) + 0.0722*c.toSRGB(uint8(b))
	return nil
}
// toSRGB linearizes an 8-bit sRGB channel value into the range [0, 1]
// per the WCAG relative luminance definition.
func (c Color) toSRGB(i uint8) float64 {
	v := float64(i) / 255
	if v <= 0.04045 {
		return v / 12.92
	}
	return math.Pow((v+0.055)/1.055, 2.4)
}
// AddColorToPalette adds c as the first color in p if not already there.
// Note that it does no additional checks, so callers must make sure
// that the palette is valid for the relevant format.
func AddColorToPalette(c color.Color, p color.Palette) color.Palette {
var found bool
if slices.Contains(p, c) {
found = true
}
if !found {
p = append(color.Palette{c}, p...)
}
return p
}
// ReplaceColorInPalette will replace the color in palette p closest to c in Euclidean
// R,G,B,A space with c.
// The palette is modified in place.
func ReplaceColorInPalette(c color.Color, p color.Palette) {
	p[p.Index(c)] = c
}
// ColorGoToHexString converts a color.Color to a hex string.
func ColorGoToHexString(c color.Color) string {
r, g, b, a := c.RGBA()
rgba := color.RGBA{uint8(r), uint8(g), uint8(b), uint8(a)}
if rgba.A == 0xff {
return fmt.Sprintf("#%.2x%.2x%.2x", rgba.R, rgba.G, rgba.B)
}
return fmt.Sprintf("#%.2x%.2x%.2x%.2x", rgba.R, rgba.G, rgba.B, rgba.A)
}
// ColorGoToColor converts a color.Color to a Color, precomputing its hex
// string and luminance. It panics if init fails (currently never).
func ColorGoToColor(c color.Color) Color {
	cc := Color{color: c}
	if err := cc.init(); err != nil {
		panic(err)
	}
	return cc
}
// hexStringToColor converts a hex color string (e.g. "#fff" or "4287f5")
// to a Color, panicking on invalid input.
func hexStringToColor(s string) Color {
	c, err := hexStringToColorGo(s)
	if err != nil {
		panic(err)
	}
	return ColorGoToColor(c)
}
// HexStringsToColors converts a slice of hex strings to a slice of Colors.
// It panics on invalid input (see hexStringToColor).
func HexStringsToColors(s ...string) []Color {
	if len(s) == 0 {
		return nil
	}
	colors := make([]Color, 0, len(s))
	for _, hexColor := range s {
		colors = append(colors, hexStringToColor(hexColor))
	}
	return colors
}
// toColorGo converts v to a color.Color. It accepts either a value that
// provides its own color (colorGoProvider, e.g. Color) or a hex color
// string. The bool result reports whether a conversion was possible; a
// non-nil error means the value was a string but not a valid color code.
func toColorGo(v any) (color.Color, bool, error) {
	switch vv := v.(type) {
	case colorGoProvider:
		return vv.ColorGo(), true, nil
	default:
		s, ok := hstrings.ToString(v)
		if !ok {
			// Not a string and not a color provider: not convertible.
			return nil, false, nil
		}
		c, err := hexStringToColorGo(s)
		if err != nil {
			return nil, false, err
		}
		return c, true, nil
	}
}
func hexStringToColorGo(s string) (color.Color, error) {
s = strings.TrimPrefix(s, "#")
if len(s) != 3 && len(s) != 4 && len(s) != 6 && len(s) != 8 {
return nil, fmt.Errorf("invalid color code: %q", s)
}
s = strings.ToLower(s)
if len(s) == 3 || len(s) == 4 {
var v string
for _, r := range s {
v += string(r) + string(r)
}
s = v
}
// Standard colors.
if s == "ffffff" {
return color.White, nil
}
if s == "000000" {
return color.Black, nil
}
// Set Alfa to white.
if len(s) == 6 {
s += "ff"
}
b, err := hex.DecodeString(s)
if err != nil {
return nil, err
}
return color.RGBA{b[0], b[1], b[2], b[3]}, nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/filters_test.go | resources/images/filters_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/hashing"
)
// TestFilterHash verifies that identical filters hash identically and
// that different filters (or same filter, different args) hash differently,
// since these hashes feed image cache keys.
func TestFilterHash(t *testing.T) {
	c := qt.New(t)
	f := &Filters{}
	c.Assert(hashing.HashString(f.Grayscale()), qt.Equals, hashing.HashString(f.Grayscale()))
	c.Assert(hashing.HashString(f.Grayscale()), qt.Not(qt.Equals), hashing.HashString(f.Invert()))
	c.Assert(hashing.HashString(f.Gamma(32)), qt.Not(qt.Equals), hashing.HashString(f.Gamma(33)))
	c.Assert(hashing.HashString(f.Gamma(32)), qt.Equals, hashing.HashString(f.Gamma(32)))
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/config_test.go | resources/images/config_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"fmt"
"strings"
"testing"
qt "github.com/frankban/quicktest"
)
// TestDecodeConfig covers decoding of the imaging configuration map:
// value normalization (lowercasing), defaults for an empty map, and
// rejection of out-of-range or unknown quality/filter/anchor values.
func TestDecodeConfig(t *testing.T) {
	c := qt.New(t)
	m := map[string]any{
		"quality": 42,
		"resampleFilter": "NearestNeighbor",
		"anchor": "topLeft",
	}
	imagingConfig, err := DecodeConfig(m)
	c.Assert(err, qt.IsNil)
	conf := imagingConfig.Config
	c.Assert(conf.Imaging.Quality, qt.Equals, 42)
	c.Assert(conf.Imaging.ResampleFilter, qt.Equals, "nearestneighbor")
	c.Assert(conf.Imaging.Anchor, qt.Equals, "topleft")
	// An empty map must produce the documented defaults.
	m = map[string]any{}
	imagingConfig, err = DecodeConfig(m)
	c.Assert(err, qt.IsNil)
	conf = imagingConfig.Config
	c.Assert(conf.Imaging.ResampleFilter, qt.Equals, "box")
	c.Assert(conf.Imaging.Anchor, qt.Equals, "smart")
	// Invalid values must fail to decode.
	_, err = DecodeConfig(map[string]any{
		"quality": 123,
	})
	c.Assert(err, qt.Not(qt.IsNil))
	_, err = DecodeConfig(map[string]any{
		"resampleFilter": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))
	_, err = DecodeConfig(map[string]any{
		"anchor": "asdf",
	})
	c.Assert(err, qt.Not(qt.IsNil))
	imagingConfig, err = DecodeConfig(map[string]any{
		"anchor": "Smart",
	})
	// NOTE(review): conf is read before err is asserted below — consider
	// asserting err first.
	conf = imagingConfig.Config
	c.Assert(err, qt.IsNil)
	c.Assert(conf.Imaging.Anchor, qt.Equals, "smart")
	imagingConfig, err = DecodeConfig(map[string]any{
		"exif": map[string]any{
			"disableLatLong": true,
		},
	})
	c.Assert(err, qt.IsNil)
	conf = imagingConfig.Config
	c.Assert(conf.Imaging.Exif.DisableLatLong, qt.Equals, true)
	c.Assert(conf.Imaging.Exif.ExcludeFields, qt.Equals, "GPS|Exif|Exposure[M|P|B]|Contrast|Resolution|Sharp|JPEG|Metering|Sensing|Saturation|ColorSpace|Flash|WhiteBalance")
}
// TestDecodeImageConfig table-tests DecodeImageConfig: each case combines
// an action with a space-separated spec string; expect == false marks
// specs that must fail to decode.
func TestDecodeImageConfig(t *testing.T) {
	for i, this := range []struct {
		action string
		in string
		expect any
	}{
		{"resize", "300x400", newImageConfig("resize", 300, 400, 75, 0, "box", "smart", "")},
		{"resize", "300x400 #fff", newImageConfig("resize", 300, 400, 75, 0, "box", "smart", "fff")},
		{"resize", "100x200 bottomRight", newImageConfig("resize", 100, 200, 75, 0, "box", "BottomRight", "")},
		{"resize", "10x20 topleft Lanczos", newImageConfig("resize", 10, 20, 75, 0, "Lanczos", "topleft", "")},
		{"resize", "linear left 10x r180", newImageConfig("resize", 10, 0, 75, 180, "linear", "left", "")},
		{"resize", "x20 riGht Cosine q95", newImageConfig("resize", 0, 20, 95, 0, "cosine", "right", "")},
		{"crop", "300x400", newImageConfig("crop", 300, 400, 75, 0, "box", "smart", "")},
		{"fill", "300x400", newImageConfig("fill", 300, 400, 75, 0, "box", "smart", "")},
		{"fit", "300x400", newImageConfig("fit", 300, 400, 75, 0, "box", "smart", "")},
		{"resize", "", false},
		{"resize", "foo", false},
		{"crop", "100x", false},
		{"fill", "100x", false},
		{"fit", "100x", false},
		{"foo", "100x", false},
	} {
		cfg, err := DecodeConfig(nil)
		if err != nil {
			t.Fatal(err)
		}
		options := append([]string{this.action}, strings.Fields(this.in)...)
		result, err := DecodeImageConfig(options, cfg, PNG)
		if b, ok := this.expect.(bool); ok && !b {
			if err == nil {
				t.Errorf("[%d] parseImageConfig didn't return an expected error", i)
			}
		} else {
			if err != nil {
				t.Fatalf("[%d] err: %s", i, err)
			}
			expect := this.expect.(ImageConfig)
			// The generated key is not under test; clear it so the
			// comparison only covers the decoded fields.
			result.Key = ""
			if fmt.Sprint(result) != fmt.Sprint(expect) {
				t.Fatalf("[%d] got\n%v\n but expected\n%v", i, result, expect)
			}
		}
	}
}
// newImageConfig builds the expected ImageConfig for a test case. Filter
// and anchor names are looked up case-insensitively; an unknown anchor
// leaves the default SmartCropAnchor in place, an unknown filter leaves
// the default filter.
func newImageConfig(action string, width, height, quality, rotate int, filter, anchor, bgColor string) ImageConfig {
	var c ImageConfig = GetDefaultImageConfig(nil)
	c.Action = action
	c.TargetFormat = PNG
	c.Width = width
	c.Height = height
	c.Quality = quality
	c.Rotate = rotate
	c.BgColor, _ = hexStringToColorGo(bgColor)
	c.Anchor = SmartCropAnchor
	if filter != "" {
		filter = strings.ToLower(filter)
		if v, ok := imageFilters[filter]; ok {
			c.Filter = v
		}
	}
	if anchor != "" {
		anchor = strings.ToLower(anchor)
		if v, ok := anchorPositions[anchor]; ok {
			c.Anchor = v
		}
	}
	return c
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/auto_orient.go | resources/images/auto_orient.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"image"
"image/draw"
"github.com/disintegration/gift"
"github.com/gohugoio/hugo/resources/images/exif"
"github.com/spf13/cast"
)
// Compile-time check that autoOrientFilter implements gift.Filter.
var _ gift.Filter = (*autoOrientFilter)(nil)

// transformationFilters maps EXIF Orientation tag values 2-8 to the gift
// transformation that restores the image to its upright position.
// Orientation 1 (already upright) needs no transformation and is absent.
var transformationFilters = map[int]gift.Filter{
	2: gift.FlipHorizontal(),
	3: gift.Rotate180(),
	4: gift.FlipVertical(),
	5: gift.Transpose(),
	6: gift.Rotate270(),
	7: gift.Transverse(),
	8: gift.Rotate90(),
}
// autoOrientFilter is a marker filter that is resolved to a concrete gift
// transformation via AutoOrient; it does not draw by itself.
type autoOrientFilter struct{}

// ImageFilterFromOrientationProvider resolves EXIF metadata into a
// concrete gift.Filter, or nil when no transformation is needed.
type ImageFilterFromOrientationProvider interface {
	AutoOrient(exifInfo *exif.ExifInfo) gift.Filter
}
// Draw implements gift.Filter. autoOrientFilter is a marker filter only,
// so drawing is not supported and panics when invoked.
func (f autoOrientFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
	panic("not supported")
}
// Bounds implements gift.Filter. Like Draw, it is not supported for this
// marker filter and panics when invoked.
func (f autoOrientFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
	panic("not supported")
}
// AutoOrient returns the gift.Filter that counteracts the EXIF
// Orientation tag in exifInfo, or nil when there is no EXIF data, no
// Orientation tag, or the orientation requires no transformation.
func (f autoOrientFilter) AutoOrient(exifInfo *exif.ExifInfo) gift.Filter {
	if exifInfo != nil {
		if v, ok := exifInfo.Tags["Orientation"]; ok {
			// The tag value's concrete type varies, so convert leniently.
			orientation := cast.ToInt(v)
			if filter, ok := transformationFilters[orientation]; ok {
				return filter
			}
		}
	}
	return nil
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/images_integration_test.go | resources/images/images_integration_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images_test
import (
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestAutoOrient verifies that images.AutoOrient swaps width and height
// for an image whose EXIF Orientation tag is 6 (needs a 270° rotation).
func TestAutoOrient(t *testing.T) {
	files := `
-- hugo.toml --
-- assets/rotate270.jpg --
sourcefilename: ../testdata/exif/orientation6.jpg
-- layouts/home.html --
{{ $img := resources.Get "rotate270.jpg" }}
W/H original: {{ $img.Width }}/{{ $img.Height }}
{{ $rotated := $img.Filter images.AutoOrient }}
W/H rotated: {{ $rotated.Width }}/{{ $rotated.Height }}
`
	b := hugolib.Test(t, files)
	b.AssertFileContent("public/index.html", "W/H original: 80/40\n\nW/H rotated: 40/80")
}
// Issue 12733.
// TestOrientationEq verifies that the EXIF Orientation tag value compares
// equal to an untyped integer in templates.
func TestOrientationEq(t *testing.T) {
	files := `
-- hugo.toml --
-- assets/rotate270.jpg --
sourcefilename: ../testdata/exif/orientation6.jpg
-- layouts/home.html --
{{ $img := resources.Get "rotate270.jpg" }}
{{ $orientation := $img.Exif.Tags.Orientation }}
Orientation: {{ $orientation }}|eq 6: {{ eq $orientation 6 }}|Type: {{ printf "%T" $orientation }}|
`
	b := hugolib.Test(t, files)
	b.AssertFileContent("public/index.html", "Orientation: 6|eq 6: true|")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/resampling.go | resources/images/resampling.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import "math"
// We moved from imaging to the gift package for image processing at some point.
// That package had more, but also less resampling filters. So we add the missing
// ones here. They are fairly exotic, but someone may use them, so keep them here
// for now.
//
// The filters below are ported from https://github.com/disintegration/imaging/blob/9aab30e6aa535fe3337b489b76759ef97dfaf362/resize.go#L369
// MIT License.
var (
	// Hermite cubic spline filter (BC-spline; B=0; C=0).
	hermiteResampling = resamp{
		name: "Hermite",
		support: 1.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 1.0 {
				return bcspline(x, 0.0, 0.0)
			}
			return 0
		},
	}
	// Mitchell-Netravali cubic filter (BC-spline; B=1/3; C=1/3).
	mitchellNetravaliResampling = resamp{
		name: "MitchellNetravali",
		support: 2.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 2.0 {
				return bcspline(x, 1.0/3.0, 1.0/3.0)
			}
			return 0
		},
	}
	// Catmull-Rom - sharp cubic filter (BC-spline; B=0; C=0.5).
	catmullRomResampling = resamp{
		name: "CatmullRomResampling",
		support: 2.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 2.0 {
				return bcspline(x, 0.0, 0.5)
			}
			return 0
		},
	}
	// BSpline is a smooth cubic filter (BC-spline; B=1; C=0).
	bSplineResampling = resamp{
		name: "BSplineResampling",
		support: 2.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 2.0 {
				return bcspline(x, 1.0, 0.0)
			}
			return 0
		},
	}
	// Gaussian blurring filter.
	gaussianResampling = resamp{
		name: "GaussianResampling",
		support: 2.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 2.0 {
				return float32(math.Exp(float64(-2 * x * x)))
			}
			return 0
		},
	}
	// Hann-windowed sinc filter (3 lobes).
	hannResampling = resamp{
		name: "HannResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * float32(0.5+0.5*math.Cos(math.Pi*float64(x)/3.0))
			}
			return 0
		},
	}
	// Hamming-windowed sinc filter (3 lobes).
	hammingResampling = resamp{
		name: "HammingResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * float32(0.54+0.46*math.Cos(math.Pi*float64(x)/3.0))
			}
			return 0
		},
	}
	// Blackman-windowed sinc filter (3 lobes).
	blackmanResampling = resamp{
		name: "BlackmanResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * float32(0.42-0.5*math.Cos(math.Pi*float64(x)/3.0+math.Pi)+0.08*math.Cos(2.0*math.Pi*float64(x)/3.0))
			}
			return 0
		},
	}
	// Bartlett-windowed sinc filter (triangular window, 3 lobes).
	bartlettResampling = resamp{
		name: "BartlettResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * (3.0 - x) / 3.0
			}
			return 0
		},
	}
	// Welch-windowed sinc filter (parabolic window, 3 lobes).
	welchResampling = resamp{
		name: "WelchResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * (1.0 - (x * x / 9.0))
			}
			return 0
		},
	}
	// Cosine-windowed sinc filter (3 lobes).
	cosineResampling = resamp{
		name: "CosineResampling",
		support: 3.0,
		kernel: func(x float32) float32 {
			x = absf32(x)
			if x < 3.0 {
				return sinc(x) * float32(math.Cos((math.Pi/2.0)*(float64(x)/3.0)))
			}
			return 0
		},
	}
)
// The following code is borrowed from https://raw.githubusercontent.com/disintegration/gift/master/resize.go
// MIT licensed.
// resamp is a named resampling filter: a kernel function together with
// its finite support radius.
type resamp struct {
	name string
	support float32
	kernel func(float32) float32
}
// String returns the filter's name.
func (r resamp) String() string {
	return r.name
}
// Support returns the kernel's support radius; the kernel is zero
// outside [-support, support].
func (r resamp) Support() float32 {
	return r.support
}
// Kernel evaluates the filter kernel at x.
func (r resamp) Kernel(x float32) float32 {
	return r.kernel(x)
}
// bcspline evaluates the BC-spline (Mitchell-Netravali family) kernel at
// x with parameters b and c. The kernel is symmetric around zero with
// support [-2, 2]; outside that interval it is zero.
func bcspline(x, b, c float32) float32 {
	if x < 0 {
		x = -x
	}
	switch {
	case x < 1:
		return ((12-9*b-6*c)*x*x*x + (-18+12*b+6*c)*x*x + (6 - 2*b)) / 6
	case x < 2:
		return ((-b-6*c)*x*x*x + (6*b+30*c)*x*x + (-12*b-48*c)*x + (8*b + 24*c)) / 6
	default:
		return 0
	}
}
// absf32 returns the absolute value of x.
func absf32(x float32) float32 {
	if x >= 0 {
		return x
	}
	return -x
}
func sinc(x float32) float32 {
if x == 0 {
return 1
}
return float32(math.Sin(math.Pi*float64(x)) / (math.Pi * float64(x)))
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/filters.go | resources/images/filters.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package images provides template functions for manipulating images.
package images
import (
"fmt"
"image/color"
"strings"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/resources/resource"
"github.com/makeworld-the-better-one/dither/v2"
"github.com/mitchellh/mapstructure"
"github.com/disintegration/gift"
"github.com/spf13/cast"
)
// Increment for re-generation of images using these filters.
const filterAPIVersion = 0

// Filters is the receiver for the image filter constructors below, which
// are exposed as template functions (see the package comment).
type Filters struct{}
// Process creates a filter that processes an image using the given specification.
func (*Filters) Process(spec any) gift.Filter {
	// Lowercase the spec so equivalent specs produce the same options key.
	specs := strings.ToLower(cast.ToString(spec))
	return filter{
		Options: newFilterOpts(specs),
		Filter: processFilter{
			spec: specs,
		},
	}
}
// Overlay creates a filter that overlays src at position x y.
// src.Key() (not the image data) is used when building the filter's
// options, keeping the cache key stable.
func (*Filters) Overlay(src ImageSource, x, y any) gift.Filter {
	return filter{
		Options: newFilterOpts(src.Key(), x, y),
		Filter: overlayFilter{src: src, x: cast.ToInt(x), y: cast.ToInt(y)},
	}
}
// Mask creates a filter that applies a mask image to the source image.
// mask.Key() is used when building the filter's options (cache key).
func (*Filters) Mask(mask ImageSource) gift.Filter {
	return filter{
		Options: newFilterOpts(mask.Key()),
		Filter: maskFilter{mask: mask},
	}
}
// Opacity creates a filter that changes the opacity of an image.
// The opacity parameter must be in range (0, 1).
func (*Filters) Opacity(opacity any) gift.Filter {
	return filter{
		Options: newFilterOpts(opacity),
		Filter: opacityFilter{opacity: cast.ToFloat32(opacity)},
	}
}
// Text creates a filter that draws text with the given options.
// Recognized options (first optional argument, a map): color, size, x, y,
// alignx, aligny, linespacing and font. Defaults: white text, size 20,
// position (10, 10), aligned left/top, linespacing 2. Unrecognized option
// keys are silently ignored; invalid alignment or font values panic.
func (*Filters) Text(text string, options ...any) gift.Filter {
	tf := textFilter{
		text: text,
		color: color.White,
		size: 20,
		x: 10,
		y: 10,
		alignx: "left",
		aligny: "top",
		linespacing: 2,
	}
	var opt maps.Params
	if len(options) > 0 {
		opt = maps.MustToParamsAndPrepare(options[0])
		for option, v := range opt {
			switch option {
			case "color":
				// Invalid colors are silently ignored, keeping the default.
				if color, ok, _ := toColorGo(v); ok {
					tf.color = color
				}
			case "size":
				tf.size = cast.ToFloat64(v)
			case "x":
				tf.x = cast.ToInt(v)
			case "y":
				tf.y = cast.ToInt(v)
			case "alignx":
				tf.alignx = cast.ToString(v)
				if tf.alignx != "left" && tf.alignx != "center" && tf.alignx != "right" {
					panic("alignx must be one of left, center, right")
				}
			case "aligny":
				tf.aligny = cast.ToString(v)
				if tf.aligny != "top" && tf.aligny != "center" && tf.aligny != "bottom" {
					panic("aligny must be one of top, center, bottom")
				}
			case "linespacing":
				tf.linespacing = cast.ToInt(v)
			case "font":
				if err, ok := v.(error); ok {
					panic(fmt.Sprintf("invalid font source: %s", err))
				}
				// The font source must both be readable and identifiable.
				fontSource, ok1 := v.(hugio.ReadSeekCloserProvider)
				identifier, ok2 := v.(resource.Identifier)
				if !(ok1 && ok2) {
					panic(fmt.Sprintf("invalid text font source: %T", v))
				}
				tf.fontSource = fontSource
				// The input value isn't hashable and will not make a stable key.
				// Replace it with a string in the map used as basis for the
				// hash string.
				opt["font"] = identifier.Key()
			}
		}
	}
	return filter{
		Options: newFilterOpts(text, opt),
		Filter: tf,
	}
}
// Padding creates a filter that resizes the image canvas without resizing the
// image. The last argument is the canvas color, expressed as an RGB or RGBA
// hexadecimal color. The default value is `ffffffff` (opaque white). The
// preceding arguments are the padding values, in pixels, using the CSS
// shorthand property syntax. Negative padding values will crop the image. The
// signature is images.Padding V1 [V2] [V3] [V4] [COLOR].
func (*Filters) Padding(args ...any) gift.Filter {
	if len(args) < 1 || len(args) > 5 {
		panic("the padding filter requires between 1 and 5 arguments")
	}
	var top, right, bottom, left int
	var ccolor color.Color = color.White // canvas color
	_args := args // preserve original args for most stable hash
	// If the last argument parses as a color, take it as the canvas color
	// and strip it from the padding values.
	if vcs, ok, err := toColorGo(args[len(args)-1]); ok || err != nil {
		if err != nil {
			panic("invalid canvas color: specify RGB or RGBA using hex notation")
		}
		ccolor = vcs
		args = args[:len(args)-1]
		if len(args) == 0 {
			panic("not enough arguments: provide one or more padding values using the CSS shorthand property syntax")
		}
	}
	var vals []int
	for _, v := range args {
		vi := cast.ToInt(v)
		if vi > 5000 {
			panic("padding values must not exceed 5000 pixels")
		}
		vals = append(vals, vi)
	}
	// Expand the CSS shorthand into top/right/bottom/left.
	switch len(args) {
	case 1:
		top, right, bottom, left = vals[0], vals[0], vals[0], vals[0]
	case 2:
		top, right, bottom, left = vals[0], vals[1], vals[0], vals[1]
	case 3:
		top, right, bottom, left = vals[0], vals[1], vals[2], vals[1]
	case 4:
		top, right, bottom, left = vals[0], vals[1], vals[2], vals[3]
	default:
		// Reachable when 5 args were given and the last was not a color.
		panic(fmt.Sprintf("too many padding values: received %d, expected maximum of 4", len(args)))
	}
	return filter{
		Options: newFilterOpts(_args...),
		Filter: paddingFilter{
			top: top,
			right: right,
			bottom: bottom,
			left: left,
			ccolor: ccolor,
		},
	}
}
// Dither creates a filter that dithers an image.
// Options (first optional argument, decoded leniently): Colors (palette of
// hex colors, default black and white), Method (default "floydsteinberg"),
// Serpentine (default true, error-diffusion methods only) and Strength
// (default 1.0). Invalid options, colors or methods panic.
func (*Filters) Dither(options ...any) gift.Filter {
	ditherOptions := struct {
		Colors []any
		Method string
		Serpentine bool
		Strength float32
	}{
		Method: "floydsteinberg",
		Serpentine: true,
		Strength: 1.0,
	}
	if len(options) != 0 {
		err := mapstructure.WeakDecode(options[0], &ditherOptions)
		if err != nil {
			panic(fmt.Sprintf("failed to decode options: %s", err))
		}
	}
	// Default palette: opaque black and white; this also guarantees the
	// minimum palette size below when no colors were given.
	if len(ditherOptions.Colors) == 0 {
		ditherOptions.Colors = []any{"000000ff", "ffffffff"}
	}
	if len(ditherOptions.Colors) < 2 {
		panic("palette must have at least two colors")
	}
	var palette []color.Color
	for _, c := range ditherOptions.Colors {
		cc, ok, err := toColorGo(c)
		if !ok || err != nil {
			panic(fmt.Sprintf("%q is an invalid color: specify RGB or RGBA using hexadecimal notation", c))
		}
		palette = append(palette, cc)
	}
	d := dither.NewDitherer(palette)
	// The method name selects either an error-diffusion matrix or an
	// ordered-dithering pixel mapper; Serpentine only applies to the former.
	if method, ok := ditherMethodsErrorDiffusion[strings.ToLower(ditherOptions.Method)]; ok {
		d.Matrix = dither.ErrorDiffusionStrength(method, ditherOptions.Strength)
		d.Serpentine = ditherOptions.Serpentine
	} else if method, ok := ditherMethodsOrdered[strings.ToLower(ditherOptions.Method)]; ok {
		d.Mapper = dither.PixelMapperFromMatrix(method, ditherOptions.Strength)
	} else {
		panic(fmt.Sprintf("%q is an invalid dithering method: see documentation", ditherOptions.Method))
	}
	return filter{
		Options: newFilterOpts(ditherOptions),
		Filter: ditherFilter{ditherer: d},
	}
}
// AutoOrient creates a filter that rotates and flips an image as needed per
// its EXIF orientation tag.
func (*Filters) AutoOrient() gift.Filter {
	var f filter
	f.Filter = autoOrientFilter{}
	return f
}
// Brightness creates a filter that changes the brightness of an image.
// The percentage parameter must be in range (-100, 100).
func (*Filters) Brightness(percentage any) gift.Filter {
	amount := cast.ToFloat32(percentage)
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Brightness(amount),
	}
}
// ColorBalance creates a filter that changes the color balance of an image.
// The percentage parameters for each color channel (red, green, blue) must be in range (-100, 500).
func (*Filters) ColorBalance(percentageRed, percentageGreen, percentageBlue any) gift.Filter {
	r := cast.ToFloat32(percentageRed)
	g := cast.ToFloat32(percentageGreen)
	b := cast.ToFloat32(percentageBlue)
	return filter{
		Options: newFilterOpts(percentageRed, percentageGreen, percentageBlue),
		Filter:  gift.ColorBalance(r, g, b),
	}
}
// Colorize creates a filter that produces a colorized version of an image.
// The hue parameter is the angle on the color wheel, typically in range (0, 360).
// The saturation parameter must be in range (0, 100).
// The percentage parameter specifies the strength of the effect, it must be in range (0, 100).
func (*Filters) Colorize(hue, saturation, percentage any) gift.Filter {
	h := cast.ToFloat32(hue)
	s := cast.ToFloat32(saturation)
	p := cast.ToFloat32(percentage)
	return filter{
		Options: newFilterOpts(hue, saturation, percentage),
		Filter:  gift.Colorize(h, s, p),
	}
}
// Contrast creates a filter that changes the contrast of an image.
// The percentage parameter must be in range (-100, 100).
func (*Filters) Contrast(percentage any) gift.Filter {
	amount := cast.ToFloat32(percentage)
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Contrast(amount),
	}
}
// Gamma creates a filter that performs a gamma correction on an image.
// The gamma parameter must be positive. Gamma = 1 gives the original image.
// Gamma less than 1 darkens the image and gamma greater than 1 lightens it.
func (*Filters) Gamma(gamma any) gift.Filter {
	g := cast.ToFloat32(gamma)
	return filter{
		Options: newFilterOpts(gamma),
		Filter:  gift.Gamma(g),
	}
}
// GaussianBlur creates a filter that applies a gaussian blur to an image.
func (*Filters) GaussianBlur(sigma any) gift.Filter {
	s := cast.ToFloat32(sigma)
	return filter{
		Options: newFilterOpts(sigma),
		Filter:  gift.GaussianBlur(s),
	}
}
// Grayscale creates a filter that produces a grayscale version of an image.
func (*Filters) Grayscale() gift.Filter {
	var f filter
	f.Filter = gift.Grayscale()
	return f
}
// Hue creates a filter that rotates the hue of an image.
// The hue angle shift is typically in range -180 to 180.
func (*Filters) Hue(shift any) gift.Filter {
	angle := cast.ToFloat32(shift)
	return filter{
		Options: newFilterOpts(shift),
		Filter:  gift.Hue(angle),
	}
}
// Invert creates a filter that negates the colors of an image.
func (*Filters) Invert() gift.Filter {
	var f filter
	f.Filter = gift.Invert()
	return f
}
// Pixelate creates a filter that applies a pixelation effect to an image.
func (*Filters) Pixelate(size any) gift.Filter {
	blockSize := cast.ToInt(size)
	return filter{
		Options: newFilterOpts(size),
		Filter:  gift.Pixelate(blockSize),
	}
}
// Saturation creates a filter that changes the saturation of an image.
func (*Filters) Saturation(percentage any) gift.Filter {
	amount := cast.ToFloat32(percentage)
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Saturation(amount),
	}
}
// Sepia creates a filter that produces a sepia-toned version of an image.
func (*Filters) Sepia(percentage any) gift.Filter {
	amount := cast.ToFloat32(percentage)
	return filter{
		Options: newFilterOpts(percentage),
		Filter:  gift.Sepia(amount),
	}
}
// Sigmoid creates a filter that changes the contrast of an image using a sigmoidal function and returns the adjusted image.
// It's a non-linear contrast change useful for photo adjustments as it preserves highlight and shadow detail.
func (*Filters) Sigmoid(midpoint, factor any) gift.Filter {
	mid := cast.ToFloat32(midpoint)
	fac := cast.ToFloat32(factor)
	return filter{
		Options: newFilterOpts(midpoint, factor),
		Filter:  gift.Sigmoid(mid, fac),
	}
}
// UnsharpMask creates a filter that sharpens an image.
// The sigma parameter is used in a gaussian function and affects the radius of effect.
// Sigma must be positive. Sharpen radius roughly equals 3 * sigma.
// The amount parameter controls how much darker and how much lighter the edge borders become. Typically between 0.5 and 1.5.
// The threshold parameter controls the minimum brightness change that will be sharpened. Typically between 0 and 0.05.
func (*Filters) UnsharpMask(sigma, amount, threshold any) gift.Filter {
	s := cast.ToFloat32(sigma)
	a := cast.ToFloat32(amount)
	t := cast.ToFloat32(threshold)
	return filter{
		Options: newFilterOpts(sigma, amount, threshold),
		Filter:  gift.UnsharpMask(s, a, t),
	}
}
// filter wraps a gift.Filter together with the options used to create it,
// so the full filter chain can be hashed for cache keys.
type filter struct {
	Options filterOpts
	gift.Filter
}

// filterOpts records the creation arguments of a filter.
// For cache-busting.
type filterOpts struct {
	// Version is bumped when the filter implementation changes in a way
	// that should invalidate cached images.
	Version int
	// Vals holds the arguments the filter was created with.
	Vals any
}

// newFilterOpts creates a filterOpts with the current filter API version
// and the given creation arguments.
func newFilterOpts(vals ...any) filterOpts {
	return filterOpts{
		Version: filterAPIVersion,
		Vals:    vals,
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/overlay.go | resources/images/overlay.go | // Copyright 2020 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"fmt"
"image"
"image/draw"
"github.com/disintegration/gift"
)
// Compile-time check that overlayFilter implements gift.Filter.
var _ gift.Filter = (*overlayFilter)(nil)

// overlayFilter draws a source image on top of the filtered image at the
// given (x, y) offset.
type overlayFilter struct {
	src  ImageSource
	x, y int
}
// Draw first copies src to dst, then composites the overlay image on top at
// the configured offset using the Porter-Duff "over" operator.
// It panics if the overlay image cannot be decoded.
func (f overlayFilter) Draw(dst draw.Image, src image.Image, options *gift.Options) {
	overlaySrc, err := f.src.DecodeImage()
	if err != nil {
		panic(fmt.Sprintf("failed to decode image: %s", err))
	}

	gift.New().Draw(dst, src)
	gift.New().DrawAt(dst, overlaySrc, image.Pt(f.x, f.y), gift.OverOperator)
}
// Bounds returns the bounds of the resulting image; the overlay never
// changes the size of the source image.
func (f overlayFilter) Bounds(srcBounds image.Rectangle) image.Rectangle {
	w, h := srcBounds.Dx(), srcBounds.Dy()
	return image.Rect(0, 0, w, h)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/codec.go | resources/images/codec.go | // Copyright 2025 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package images
import (
"bufio"
"errors"
"fmt"
"image"
"image/color/palette"
"image/draw"
"image/gif"
"image/jpeg"
"image/png"
"io"
"github.com/gohugoio/hugo/common/himage"
"golang.org/x/image/bmp"
"golang.org/x/image/tiff"
)
// Decoder defines the decoding of an image format.
// These match the global image package functions.
type Decoder interface {
	Decode(r io.Reader) (image.Image, error)
	DecodeConfig(r io.Reader) (image.Config, error)
}

// ToEncoder encodes an already decoded image to w using the settings in conf.
type ToEncoder interface {
	EncodeTo(conf ImageConfig, w io.Writer, src image.Image) error
}

// Encoder defines the encoding of an image format with the given options.
type Encoder interface {
	Encode(w io.Writer, src image.Image, options map[string]any) error
}

// EncodeDecoder defines both decoding and encoding of an image format as defined by the standard library.
type EncodeDecoder interface {
	Decoder
	Encoder
}
// Codec is a generic image codec supporting multiple formats.
// WebP has no codec in the Go standard library, so its implementation is
// injected via newCodec.
type Codec struct {
	webp EncodeDecoder
}

// newCodec creates a Codec that delegates WebP decoding/encoding to webp.
func newCodec(webp EncodeDecoder) *Codec {
	return &Codec{webp: webp}
}
// EncodeTo encodes img to w in the target format given in conf.
// It returns an error for unsupported target formats.
func (d *Codec) EncodeTo(conf ImageConfig, w io.Writer, img image.Image) error {
	switch conf.TargetFormat {
	case JPEG:
		var rgba *image.RGBA
		quality := conf.Quality

		// JPEG has no alpha channel; for fully opaque NRGBA images the pixel
		// buffer can be reinterpreted as RGBA without copying, which lets the
		// encoder take a faster path.
		if nrgba, ok := img.(*image.NRGBA); ok {
			if nrgba.Opaque() {
				rgba = &image.RGBA{
					Pix:    nrgba.Pix,
					Stride: nrgba.Stride,
					Rect:   nrgba.Rect,
				}
			}
		}
		if rgba != nil {
			return jpeg.Encode(w, rgba, &jpeg.Options{Quality: quality})
		}
		return jpeg.Encode(w, img, &jpeg.Options{Quality: quality})
	case PNG:
		encoder := png.Encoder{CompressionLevel: png.DefaultCompression}
		return encoder.Encode(w, img)

	case GIF:
		if anim, ok := img.(himage.AnimatedImage); ok {
			// Already backed by a *gif.GIF: write all frames as-is.
			if g, ok := anim.GetRaw().(*gif.GIF); ok {
				return gif.EncodeAll(w, g)
			}
			// Animated image, but not a GIF. Convert it.
			frames := anim.GetFrames()
			if len(frames) == 0 {
				return gif.Encode(w, img, &gif.Options{NumColors: 256})
			}
			frameDurations := anim.GetFrameDurations()
			if len(frameDurations) != len(frames) {
				return errors.New("gif: number of frame durations does not match number of frames")
			}
			outGif := &gif.GIF{
				Delay: himage.FrameDurationsToGifDelays(frameDurations),
			}
			outGif.LoopCount = anim.GetLoopCount()

			// GIF frames must be paletted; redraw each frame into a
			// Plan9-paletted image.
			for _, frame := range frames {
				bounds := frame.Bounds()
				palettedImage := image.NewPaletted(bounds, palette.Plan9)
				draw.Draw(palettedImage, palettedImage.Rect, frame, bounds.Min, draw.Src)
				outGif.Image = append(outGif.Image, palettedImage)
			}

			return gif.EncodeAll(w, outGif)
		}
		return gif.Encode(w, img, &gif.Options{
			NumColors: 256,
		})
	case TIFF:
		return tiff.Encode(w, img, &tiff.Options{Compression: tiff.Deflate, Predictor: true})

	case BMP:
		return bmp.Encode(w, img)
	case WEBP:
		// WebP encoding is delegated to the injected encoder.
		opts := map[string]any{
			"compression": conf.Compression,
			"quality":     conf.Quality,
			"hint":        conf.Hint,
		}
		return d.webp.Encode(w, img, opts)
	default:
		return errors.New("format not supported")
	}
}
// DecodeFormat decodes the image in r assuming the given format, with
// fallbacks to the standard library's sniffing decoder for mislabeled files.
func (d *Codec) DecodeFormat(f Format, r io.Reader) (image.Image, error) {
	switch f {
	case JPEG, PNG:
		// We reworked this decode/encode setup to get full WebP support in v0.153.0.
		// In the first take of that we used f to decide whether to call png.Decode or jpeg.Decode here,
		// but testing it on some sites, it seems that it's not uncommon to store JPEGs with PNG extensions and vice versa.
		// So, to reduce some noise in that release, we fallback to the standard library here,
		// which will read the magic bytes and decode accordingly.
		img, _, err := image.Decode(r)
		return img, err
	case GIF:
		g, err := gif.DecodeAll(r)
		if err != nil {
			return nil, fmt.Errorf("failed to decode gif: %w", err)
		}
		// Multiple delays means an animated GIF; wrap it so frames and
		// timing survive downstream processing.
		if len(g.Delay) > 1 {
			return &giphy{gif: g, Image: g.Image[0]}, nil
		}
		return g.Image[0], nil
	case TIFF:
		return tiff.Decode(r)
	case BMP:
		return bmp.Decode(r)
	case WEBP:
		img, err := d.webp.Decode(r)
		if err == nil {
			return img, nil
		}
		if rs, ok := r.(io.ReadSeeker); ok {
			// See issue 14288. Turns out it's not uncommon to e.g. name their PNG files with a WEBP extension.
			// With the old Go's webp decoder, this didn't fail (it looked for the file header),
			// but now some error has surfaced.
			// To reduce some noise, we try to reset and decode again using the standard library.
			_, err2 := rs.Seek(0, io.SeekStart)
			if err2 != nil {
				return nil, err
			}
			img, _, err2 = image.Decode(rs)
			if err2 == nil {
				return img, nil
			}
		}
		// Return the original WebP decode error, not the fallback's.
		return nil, err
	default:
		return nil, errors.New("format not supported")
	}
}
// Decode sniffs the image format from the magic bytes in r and decodes it,
// falling back to the standard library for formats it does not detect itself.
func (d *Codec) Decode(r io.Reader) (image.Image, error) {
	pr := toPeekReader(r)

	format, err := formatFromImage(pr)
	switch {
	case err != nil:
		return nil, err
	case format != 0:
		return d.DecodeFormat(format, pr)
	}

	// Unrecognized magic bytes: fall back to the standard image.Decode.
	img, _, err := image.Decode(pr)
	return img, err
}
// DecodeConfig decodes the image configuration (dimensions, color model)
// and the format name, delegating WebP to the injected decoder.
func (d *Codec) DecodeConfig(r io.Reader) (image.Config, string, error) {
	pr := toPeekReader(r)

	format, err := formatFromImage(pr)
	if err != nil {
		return image.Config{}, "", err
	}

	if format == WEBP {
		cfg, err := d.webp.DecodeConfig(pr)
		return cfg, "webp", err
	}

	// Fallback to the standard image.DecodeConfig.
	return image.DecodeConfig(pr)
}
// toPeekReader converts an io.Reader to a peekReader, wrapping it in a
// bufio.Reader only when needed.
func toPeekReader(r io.Reader) peekReader {
	pr, ok := r.(peekReader)
	if ok {
		return pr
	}
	return bufio.NewReader(r)
}
// A peekReader is an io.Reader that can also peek ahead without consuming
// the peeked bytes (satisfied by e.g. *bufio.Reader).
type peekReader interface {
	io.Reader
	Peek(int) ([]byte, error)
}
const (
	// The WebP file header is 12 bytes long and starts with "RIFF" followed by
	// 4 bytes indicating the file size, followed by "WEBP" and the VP8 chunk header.
	// We use '?' as a wildcard for the 4 size bytes.
	magicWebp = "RIFF????WEBPVP8"

	// The GIF file header is 6 bytes long and starts with "GIF87a" or "GIF89a".
	// The wildcards cover the version bytes plus one more byte.
	magicGif = "GIF8???"
)

// magicFormat pairs a magic-byte pattern with the format it identifies.
type magicFormat struct {
	magic  string
	format Format
}

// magicFormats lists the patterns probed by formatFromImage, in order.
var magicFormats = []magicFormat{
	{magic: magicWebp, format: WEBP},
	{magic: magicGif, format: GIF},
}
// formatFromImage determines the image format from the magic bytes.
// Note that this is only a partial implementation,
// as we currently only need WebP and GIF detection.
// The others can be handled by the standard library.
// It returns the zero Format when no pattern matches.
func formatFromImage(r peekReader) (Format, error) {
	for _, mf := range magicFormats {
		b, err := r.Peek(len(mf.magic))
		if err != nil || !match(mf.magic, b) {
			continue
		}
		return mf.format, nil
	}
	return 0, nil
}
// match reports whether b matches the magic pattern byte for byte,
// with '?' in the pattern acting as a single-byte wildcard.
// The lengths must be equal for a match.
func match(magic string, b []byte) bool {
	if len(b) != len(magic) {
		return false
	}
	for i, c := range []byte(magic) {
		if c != '?' && c != b[i] {
			return false
		}
	}
	return true
}
// Compile-time interface checks for giphy.
var (
	_ himage.AnimatedImage       = (*giphy)(nil)
	_ himage.ImageConfigProvider = (*giphy)(nil)
)

// giphy wraps an animated GIF. The embedded image.Image is the first frame;
// the gif field holds all frames plus timing/loop metadata.
type giphy struct {
	image.Image
	gif *gif.GIF
}
// GetRaw returns the underlying *gif.GIF.
func (g *giphy) GetRaw() any {
	return g.gif
}

// GetLoopCount returns the animation loop count.
func (g *giphy) GetLoopCount() int {
	return g.gif.LoopCount
}

// GetFrames returns all frames as []image.Image.
func (g *giphy) GetFrames() []image.Image {
	frames := make([]image.Image, len(g.gif.Image))
	for i, frame := range g.gif.Image {
		frames[i] = frame
	}
	return frames
}
// GetImageConfig returns the GIF's global image configuration.
func (g *giphy) GetImageConfig() image.Config {
	return g.gif.Config
}

// SetFrames replaces all frames. The frames must be *image.Paletted
// (the type assertion below panics otherwise), and the embedded first
// frame is updated to match. Panics on an empty slice.
func (g *giphy) SetFrames(frames []image.Image) {
	if len(frames) == 0 {
		panic("frames cannot be empty")
	}
	g.gif.Image = make([]*image.Paletted, len(frames))
	for i, frame := range frames {
		g.gif.Image[i] = frame.(*image.Paletted)
	}
	g.Image = g.gif.Image[0]
}

// SetWidthHeight updates the logical canvas size in the GIF config.
func (g *giphy) SetWidthHeight(width, height int) {
	g.gif.Config.Width = width
	g.gif.Config.Height = height
}

// GetFrameDurations returns the per-frame durations converted from GIF delays.
func (g *giphy) GetFrameDurations() []int {
	return himage.GifDelaysToFrameDurations(g.gif.Delay)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/exif/exif_test.go | resources/images/exif/exif_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exif
import (
"encoding/json"
"os"
"path/filepath"
"testing"
"time"
"github.com/bep/imagemeta"
"github.com/google/go-cmp/cmp"
qt "github.com/frankban/quicktest"
)
// TestExif decodes testdata/sunset.jpg limited to fields matching
// "Lens|Date" and checks the date, GPS coordinates and selected tags,
// including a JSON round-trip of the result.
func TestExif(t *testing.T) {
	c := qt.New(t)
	f, err := os.Open(filepath.FromSlash("../../testdata/sunset.jpg"))
	c.Assert(err, qt.IsNil)
	defer f.Close()

	d, err := NewDecoder(IncludeFields("Lens|Date"))
	c.Assert(err, qt.IsNil)
	x, err := d.Decode("", imagemeta.JPEG, f)
	c.Assert(err, qt.IsNil)
	c.Assert(x.Date.Format("2006-01-02"), qt.Equals, "2017-10-27")

	// Malaga: https://goo.gl/taazZy
	c.Assert(x.Lat, qt.Equals, float64(36.59744166666667))
	c.Assert(x.Long, qt.Equals, float64(-4.50846))

	v, found := x.Tags["LensModel"]
	c.Assert(found, qt.Equals, true)
	lensModel, ok := v.(string)
	c.Assert(ok, qt.Equals, true)
	c.Assert(lensModel, qt.Equals, "smc PENTAX-DA* 16-50mm F2.8 ED AL [IF] SDM")

	v, found = x.Tags["ModifyDate"]
	c.Assert(found, qt.Equals, true)
	c.Assert(v, qt.Equals, "2017:11:23 09:56:54")

	// Verify that it survives a round-trip to JSON and back.
	data, err := json.Marshal(x)
	c.Assert(err, qt.IsNil)

	x2 := &ExifInfo{}
	err = json.Unmarshal(data, x2)
	c.Assert(err, qt.IsNil)

	c.Assert(x2, eq, x)
}
// TestExifPNG verifies that decoding EXIF from a PNG does not error.
func TestExifPNG(t *testing.T) {
	c := qt.New(t)

	f, err := os.Open(filepath.FromSlash("../../testdata/gohugoio.png"))
	c.Assert(err, qt.IsNil)
	defer f.Close()

	d, err := NewDecoder()
	c.Assert(err, qt.IsNil)
	_, err = d.Decode("", imagemeta.PNG, f)
	c.Assert(err, qt.IsNil)
}
// TestIssue8079 verifies that non-ASCII characters in EXIF string tags
// survive decoding (see issue 8079).
func TestIssue8079(t *testing.T) {
	c := qt.New(t)

	f, err := os.Open(filepath.FromSlash("../../testdata/iss8079.jpg"))
	c.Assert(err, qt.IsNil)
	defer f.Close()

	d, err := NewDecoder()
	c.Assert(err, qt.IsNil)
	x, err := d.Decode("", imagemeta.JPEG, f)
	c.Assert(err, qt.IsNil)
	c.Assert(x.Tags["ImageDescription"], qt.Equals, "Città del Vaticano #nanoblock #vatican #vaticancity")
}
// BenchmarkDecodeExif measures full EXIF decoding of a JPEG,
// rewinding the file between iterations.
func BenchmarkDecodeExif(b *testing.B) {
	c := qt.New(b)
	f, err := os.Open(filepath.FromSlash("../../testdata/sunset.jpg"))
	c.Assert(err, qt.IsNil)
	defer f.Close()

	d, err := NewDecoder()
	c.Assert(err, qt.IsNil)
	for b.Loop() {
		_, err = d.Decode("", imagemeta.JPEG, f)
		c.Assert(err, qt.IsNil)
		// Rewind for the next iteration; Seek error deliberately ignored.
		f.Seek(0, 0)
	}
}
// eq compares values with custom rules: rationals compare by their string
// form and times by Unix seconds, so JSON round-trips compare equal.
var eq = qt.CmpEquals(
	cmp.Comparer(
		func(v1, v2 imagemeta.Rat[uint32]) bool {
			return v1.String() == v2.String()
		},
	),
	cmp.Comparer(
		func(v1, v2 imagemeta.Rat[int32]) bool {
			return v1.String() == v2.String()
		},
	),
	cmp.Comparer(func(v1, v2 time.Time) bool {
		return v1.Unix() == v2.Unix()
	}),
)
// TestIssue10738 checks that ExposureTime decodes to the expected
// numerator/denominator for a range of camera raw-derived JPEGs,
// and that the value survives a JSON round-trip.
func TestIssue10738(t *testing.T) {
	c := qt.New(t)

	// decodeExposureTime decodes the image at path with the given include
	// filter, round-trips the result through JSON, and returns the
	// ExposureTime tag value.
	decodeExposureTime := func(c *qt.C, path, include string) any {
		c.Helper()
		f, err := os.Open(filepath.FromSlash(path))
		c.Assert(err, qt.IsNil)
		defer f.Close()

		d, err := NewDecoder(IncludeFields(include))
		c.Assert(err, qt.IsNil)
		x, err := d.Decode("", imagemeta.JPEG, f)
		c.Assert(err, qt.IsNil)

		// Verify that it survives a round-trip to JSON and back.
		data, err := json.Marshal(x)
		c.Assert(err, qt.IsNil)
		x2 := &ExifInfo{}
		err = json.Unmarshal(data, x2)
		c.Assert(err, qt.IsNil)
		c.Assert(x2, eq, x)

		v, found := x.Tags["ExposureTime"]
		c.Assert(found, qt.Equals, true)
		return v
	}

	const include = "Lens|Date|ExposureTime"

	// Each test image lives in testdata/issue10738 and is named after its
	// case; vN/vD is the expected exposure time fraction.
	for _, tt := range []struct {
		name   string
		vN, vD int64
	}{
		{"canon_cr2_fraction", 1, 500},
		{"canon_cr2_integer", 10, 1},
		{"dji_dng_fraction", 1, 4000},
		{"fuji_raf_fraction", 1, 250},
		{"fuji_raf_integer", 1, 1},
		{"leica_dng_fraction", 1, 100},
		{"lumix_rw2_fraction", 1, 400},
		{"nikon_nef_d5600", 1, 1000},
		{"nikon_nef_fraction", 1, 640},
		{"nikon_nef_integer", 30, 1},
		{"nikon_nef_fraction_2", 1, 6400},
		{"sony_arw_fraction", 1, 160},
		{"sony_arw_integer", 4, 1},
	} {
		c.Run(tt.name, func(c *qt.C) {
			path := "../../testdata/issue10738/" + tt.name + ".jpg"
			got := decodeExposureTime(c, path, include)
			switch v := got.(type) {
			case float64:
				c.Assert(v, qt.Equals, float64(tt.vN))
			case imagemeta.Rat[uint32]:
				r, err := imagemeta.NewRat[uint32](uint32(tt.vN), uint32(tt.vD))
				c.Assert(err, qt.IsNil)
				c.Assert(v, eq, r)
			default:
				c.Fatalf("unexpected type: %T", got)
			}
		})
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/exif/exif.go | resources/images/exif/exif.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package exif
import (
"fmt"
"io"
"regexp"
"strconv"
"strings"
"time"
"github.com/bep/imagemeta"
"github.com/bep/logg"
"github.com/bep/tmc"
)
// ExifInfo holds the decoded Exif data for an Image.
type ExifInfo struct {
	// GPS latitude in degrees.
	Lat float64

	// GPS longitude in degrees.
	Long float64

	// Image creation date/time.
	Date time.Time

	// A collection of the available Exif tags for this Image.
	Tags Tags
}

// Decoder decodes EXIF metadata, filtered by the configured
// include/exclude expressions and date/lat-long switches.
type Decoder struct {
	includeFieldsRe  *regexp.Regexp
	excludeFieldsrRe *regexp.Regexp
	noDate           bool
	noLatLong        bool
	warnl            logg.LevelLogger
}
// shouldInclude reports whether the tag name s passes the include filter.
// A nil include filter includes everything.
func (d *Decoder) shouldInclude(s string) bool {
	if d.includeFieldsRe == nil {
		return true
	}
	return d.includeFieldsRe.MatchString(s)
}

// shouldExclude reports whether the tag name s is rejected by the exclude
// filter. A nil exclude filter excludes nothing.
func (d *Decoder) shouldExclude(s string) bool {
	if d.excludeFieldsrRe == nil {
		return false
	}
	return d.excludeFieldsrRe.MatchString(s)
}
// IncludeFields returns a Decoder option that limits decoded tags to those
// matching the given regular expression (case-insensitive unless the
// expression sets its own flags).
func IncludeFields(expression string) func(*Decoder) error {
	return func(d *Decoder) error {
		var err error
		d.includeFieldsRe, err = compileRegexp(expression)
		return err
	}
}

// ExcludeFields returns a Decoder option that drops decoded tags matching
// the given regular expression (case-insensitive unless the expression sets
// its own flags).
func ExcludeFields(expression string) func(*Decoder) error {
	return func(d *Decoder) error {
		var err error
		d.excludeFieldsrRe, err = compileRegexp(expression)
		return err
	}
}
// WithLatLongDisabled returns a Decoder option that disables GPS
// latitude/longitude extraction.
func WithLatLongDisabled(disabled bool) func(*Decoder) error {
	return func(d *Decoder) error {
		d.noLatLong = disabled
		return nil
	}
}

// WithDateDisabled returns a Decoder option that disables creation-date
// extraction.
func WithDateDisabled(disabled bool) func(*Decoder) error {
	return func(d *Decoder) error {
		d.noDate = disabled
		return nil
	}
}

// WithWarnLogger returns a Decoder option that sets the logger used for
// decode warnings.
func WithWarnLogger(warnl logg.LevelLogger) func(*Decoder) error {
	return func(d *Decoder) error {
		d.warnl = warnl
		return nil
	}
}
// compileRegexp compiles the trimmed expression, returning (nil, nil) for a
// blank expression. Unless the expression starts with "(" (i.e. sets its own
// flags or grouping), it is made case-insensitive.
func compileRegexp(expression string) (*regexp.Regexp, error) {
	expr := strings.TrimSpace(expression)
	switch {
	case expr == "":
		return nil, nil
	case strings.HasPrefix(expr, "("):
		return regexp.Compile(expr)
	default:
		// Make it case insensitive.
		return regexp.Compile("(?i)" + expr)
	}
}
// NewDecoder creates a new Decoder, applying the given options in order.
// It returns the first option error encountered.
func NewDecoder(options ...func(*Decoder) error) (*Decoder, error) {
	d := new(Decoder)
	for _, option := range options {
		if err := option(d); err != nil {
			return nil, err
		}
	}
	return d, nil
}
var (
	// isTimeTag reports whether a tag name looks time-related.
	isTimeTag = func(s string) bool {
		return strings.Contains(s, "Time")
	}

	// isGPSTag reports whether a tag name is a GPS tag.
	isGPSTag = func(s string) bool {
		return strings.HasPrefix(s, "GPS")
	}
)
// Filename is only used for logging.
// Decode decodes the EXIF metadata from r for the given image format,
// applying the configured include/exclude filters and date/lat-long settings.
func (d *Decoder) Decode(filename string, format imagemeta.ImageFormat, r io.Reader) (ex *ExifInfo, err error) {
	// The underlying decoder may panic on malformed input; convert that to
	// an error.
	defer func() {
		if r := recover(); r != nil {
			err = fmt.Errorf("exif failed: %v", r)
		}
	}()

	var tagInfos imagemeta.Tags

	handleTag := func(ti imagemeta.TagInfo) error {
		tagInfos.Add(ti)
		return nil
	}

	shouldInclude := func(ti imagemeta.TagInfo) bool {
		if ti.Source == imagemeta.EXIF {
			if !d.noDate {
				// We need the time tags to calculate the date.
				if isTimeTag(ti.Tag) {
					return true
				}
			}
			if !d.noLatLong {
				// We need to GPS tags to calculate the lat/long.
				if isGPSTag(ti.Tag) {
					return true
				}
			}
			if !strings.HasPrefix(ti.Namespace, "IFD0") {
				// Drop thumbnail tags.
				return false
			}
		}
		if d.shouldExclude(ti.Tag) {
			return false
		}
		return d.shouldInclude(ti.Tag)
	}

	var warnf func(string, ...any)

	if d.warnl != nil {
		// There should be very little warnings (fingers crossed!),
		// but this will typically be unrecognized formats.
		// To be able to possibly get rid of these warnings,
		// we need to know what images are causing them.
		warnf = func(format string, args ...any) {
			format = fmt.Sprintf("%q: %s: ", filename, format)
			d.warnl.Logf(format, args...)
		}
	}

	// NOTE(review): r is asserted to io.ReadSeeker; a non-seeker would panic
	// and be converted to an error by the deferred recover above.
	err = imagemeta.Decode(
		imagemeta.Options{
			R:               r.(io.ReadSeeker),
			ImageFormat:     format,
			ShouldHandleTag: shouldInclude,
			HandleTag:       handleTag,
			Sources:         imagemeta.EXIF, // For now. TODO(bep)
			Warnf:           warnf,
		},
	)
	// NOTE(review): a decode error is not returned early here; the tags
	// collected so far are still processed and returned together with err.

	var tm time.Time
	var lat, long float64

	if !d.noDate {
		tm, _ = tagInfos.GetDateTime()
	}

	if !d.noLatLong {
		lat, long, _ = tagInfos.GetLatLong()
	}

	// Apply the include/exclude filters a second time to the collected tags;
	// the date/GPS tags forced in above may not match the user's filters.
	tags := make(map[string]any)
	for k, v := range tagInfos.All() {
		if d.shouldExclude(k) {
			continue
		}
		if !d.shouldInclude(k) {
			continue
		}
		tags[k] = v.Value
	}

	ex = &ExifInfo{Lat: lat, Long: long, Date: tm, Tags: tags}

	return
}
// tcodec is the typed map codec used to round-trip Tags through JSON
// without losing the concrete Go types of the tag values.
var tcodec *tmc.Codec

func init() {
	// newIntadapter creates a tmc adapter that serializes the given integer
	// type as a decimal string and parses it back into the same type.
	newIntadapter := func(target any) tmc.Adapter {
		var bitSize int
		var isSigned bool
		switch target.(type) {
		case int:
			bitSize = 0
			isSigned = true
		case int8:
			bitSize = 8
			isSigned = true
		case int16:
			bitSize = 16
			isSigned = true
		case int32:
			bitSize = 32
			isSigned = true
		case int64:
			bitSize = 64
			isSigned = true
		case uint:
			bitSize = 0
		case uint8:
			bitSize = 8
		case uint16:
			bitSize = 16
		case uint32:
			bitSize = 32
		case uint64:
			bitSize = 64
		}

		intFromString := func(s string) (any, error) {
			// NOTE(review): for the unsigned bitSize==0 case (uint) this
			// parses via Atoi and returns an int, not a uint — confirm this
			// is intended.
			if bitSize == 0 {
				return strconv.Atoi(s)
			}
			var v any
			var err error
			if isSigned {
				v, err = strconv.ParseInt(s, 10, bitSize)
			} else {
				v, err = strconv.ParseUint(s, 10, bitSize)
			}
			if err != nil {
				return 0, err
			}

			// Narrow the parsed 64-bit value back to the target type.
			if isSigned {
				i := v.(int64)
				switch target.(type) {
				case int:
					return int(i), nil
				case int8:
					return int8(i), nil
				case int16:
					return int16(i), nil
				case int32:
					return int32(i), nil
				case int64:
					return i, nil
				}
			}
			i := v.(uint64)
			switch target.(type) {
			case uint:
				return uint(i), nil
			case uint8:
				return uint8(i), nil
			case uint16:
				return uint16(i), nil
			case uint32:
				return uint32(i), nil
			case uint64:
				return i, nil
			}

			return 0, fmt.Errorf("unsupported target type %T", target)
		}
		intToString := func(v any) (string, error) {
			return fmt.Sprintf("%d", v), nil
		}
		return tmc.NewAdapter(target, intFromString, intToString)
	}

	// Register adapters for the rational types and every integer width.
	ru, _ := imagemeta.NewRat[uint32](1, 2)
	ri, _ := imagemeta.NewRat[int32](1, 2)

	tmcAdapters := []tmc.Adapter{
		tmc.NewAdapter(ru, nil, nil),
		tmc.NewAdapter(ri, nil, nil),
		newIntadapter(int(1)),
		newIntadapter(int8(1)),
		newIntadapter(int16(1)),
		newIntadapter(int32(1)),
		newIntadapter(int64(1)),
		newIntadapter(uint(1)),
		newIntadapter(uint8(1)),
		newIntadapter(uint16(1)),
		newIntadapter(uint32(1)),
		newIntadapter(uint64(1)),
	}

	tmcAdapters = append(tmc.DefaultTypeAdapters, tmcAdapters...)

	var err error
	tcodec, err = tmc.New(tmc.WithTypeAdapters(tmcAdapters))
	if err != nil {
		panic(err)
	}
}
// Tags is a map of EXIF tags.
type Tags map[string]any

// UnmarshalJSON is for internal use only.
// It decodes via tcodec so tag values keep their concrete Go types.
func (v *Tags) UnmarshalJSON(b []byte) error {
	vv := make(map[string]any)
	if err := tcodec.Unmarshal(b, &vv); err != nil {
		return err
	}

	*v = vv

	return nil
}

// MarshalJSON is for internal use only.
// It encodes via tcodec so tag values keep their concrete Go types.
func (v Tags) MarshalJSON() ([]byte, error) {
	return tcodec.Marshal(v)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/resources/images/imagetesting/testing.go | resources/images/imagetesting/testing.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package imagetesting
import (
"image"
"io/fs"
"os"
"path/filepath"
"runtime"
"testing"
qt "github.com/frankban/quicktest"
"github.com/google/go-cmp/cmp"
"github.com/disintegration/gift"
"github.com/gohugoio/hugo/common/hashing"
"github.com/gohugoio/hugo/common/himage"
"github.com/gohugoio/hugo/common/hugio"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/hugofs"
"github.com/gohugoio/hugo/hugolib"
)
// eq compares directory entries and file infos by name, size and dir flag
// only, ignoring timestamps and permissions.
var eq = qt.CmpEquals(
	cmp.Comparer(func(p1, p2 os.FileInfo) bool {
		return p1.Name() == p2.Name() && p1.Size() == p2.Size() && p1.IsDir() == p2.IsDir()
	}),
	cmp.Comparer(func(d1, d2 fs.DirEntry) bool {
		p1, err1 := d1.Info()
		p2, err2 := d2.Info()
		if err1 != nil || err2 != nil {
			return false
		}
		return p1.Name() == p2.Name() && p1.Size() == p2.Size() && p1.IsDir() == p2.IsDir()
	}),
)
// GoldenImageTestOpts provides options for a golden image test.
type GoldenImageTestOpts struct {
	// The test.
	T testing.TB

	// Name of the test. Will be used as the base folder for generated images.
	// Slashes allowed and encouraged.
	Name string

	// The test site's files in txttar format.
	Files string

	// Set to true to write golden files to disk.
	WriteFiles bool

	// If not set, a temporary directory will be created.
	WorkingDir string

	// Set to true to print the temp dir used and keep it after the test.
	PrintAndKeepTempDir bool

	// Set to true to skip any assertions. Useful when adding new golden variants to a test.
	DevMode bool

	// Set to skip any assertions.
	SkipAssertions bool

	// Whether this represents a rebuild of the same site.
	// Setting this to true will keep the previous golden image set.
	Rebuild bool
}

// To rebuild all Golden image tests, toggle WriteFiles=true and run:
// GOARCH=amd64 go test -count 1 -timeout 30s -run "^TestImagesGolden" ./...
// TODO(bep) see if we can do this via flags.
// DefaultGoldenOpts is the baseline options set for golden image tests.
var DefaultGoldenOpts = GoldenImageTestOpts{
	WriteFiles: false,
	DevMode:    false,
}
// RunGolden builds the test site in opts and compares the generated images
// in public/images against the golden set in testdata/images_golden/<Name>.
// Depending on opts it can instead (re)write the golden files or skip the
// comparison entirely.
func RunGolden(opts GoldenImageTestOpts) *hugolib.IntegrationTestBuilder {
	opts.T.Helper()

	c := hugolib.Test(opts.T, opts.Files, hugolib.TestOptWithConfig(func(conf *hugolib.IntegrationTestConfig) {
		conf.NeedsOsFS = true
		conf.WorkingDir = opts.WorkingDir
		conf.PrintAndKeepTempDir = opts.PrintAndKeepTempDir
	}))

	codec := c.H.ResourceSpec.Imaging.Codec

	c.AssertFileContent("public/index.html", "Home.")

	outputDir := filepath.Join(c.H.Conf.WorkingDir(), "public", "images")
	goldenBaseDir := filepath.Join("testdata", "images_golden")
	goldenDir := filepath.Join(goldenBaseDir, filepath.FromSlash(opts.Name))

	if opts.WriteFiles {
		// Regenerate the golden set from this build; never on CI.
		c.Assert(htesting.IsRealCI(), qt.IsFalse)
		if !opts.Rebuild {
			c.Assert(os.MkdirAll(goldenBaseDir, 0o777), qt.IsNil)
			c.Assert(os.RemoveAll(goldenDir), qt.IsNil)
		}
		c.Assert(hugio.CopyDir(hugofs.Os, outputDir, goldenDir, nil), qt.IsNil)
		return c
	}

	if opts.SkipAssertions {
		return c
	}

	if opts.DevMode {
		c.Assert(htesting.IsRealCI(), qt.IsFalse)
		return c
	}

	shouldSkip := func(d fs.DirEntry) bool {
		if runtime.GOARCH == "arm64" {
			// TODO(bep) figure out why this fails on arm64. I have inspected the images, and they look identical.
			if d.Name() == "giphy_hu_bb052284cc220165.webp" {
				c.Logf("skipping %s on %s", d.Name(), runtime.GOARCH)
				return true
			}
		}
		return false
	}

	// decodeAll decodes a file into one image, or all frames for animated
	// images.
	decodeAll := func(f *os.File) []image.Image {
		c.Helper()
		var images []image.Image

		v, err := codec.Decode(f)
		c.Assert(err, qt.IsNil, qt.Commentf(f.Name()))
		if anim, ok := v.(himage.AnimatedImage); ok {
			images = anim.GetFrames()
		} else {
			images = append(images, v)
		}
		return images
	}

	// Both directories are compared entry-by-entry in sorted order.
	entries1, err := os.ReadDir(outputDir)
	c.Assert(err, qt.IsNil)
	entries2, err := os.ReadDir(goldenDir)
	c.Assert(err, qt.IsNil)
	c.Assert(len(entries1), qt.Equals, len(entries2))
	for i, e1 := range entries1 {
		if shouldSkip != nil && shouldSkip(e1) {
			continue
		}
		c.Assert(filepath.Ext(e1.Name()), qt.Not(qt.Equals), "")
		func() {
			e2 := entries2[i]

			f1, err := os.Open(filepath.Join(outputDir, e1.Name()))
			c.Assert(err, qt.IsNil)
			defer f1.Close()

			f2, err := os.Open(filepath.Join(goldenDir, e2.Name()))
			c.Assert(err, qt.IsNil)
			defer f2.Close()

			imgs2 := decodeAll(f2)
			imgs1 := decodeAll(f1)
			c.Assert(len(imgs1), qt.Equals, len(imgs2))

			// On non-FMA architectures the bytes should match exactly;
			// compare names/sizes and content hashes.
			if !UsesFMA {
				c.Assert(e1, eq, e2)

				_, err = f1.Seek(0, 0)
				c.Assert(err, qt.IsNil)
				_, err = f2.Seek(0, 0)
				c.Assert(err, qt.IsNil)

				hash1, _, err := hashing.XXHashFromReader(f1)
				c.Assert(err, qt.IsNil)
				hash2, _, err := hashing.XXHashFromReader(f2)
				c.Assert(err, qt.IsNil)

				c.Assert(hash1, qt.Equals, hash2)
			}

			// Always compare the decoded pixels (with FMA tolerance applied
			// inside goldenEqual).
			for i, img1 := range imgs1 {
				img2 := imgs2[i]
				nrgba1 := image.NewNRGBA(img1.Bounds())
				gift.New().Draw(nrgba1, img1)
				nrgba2 := image.NewNRGBA(img2.Bounds())
				gift.New().Draw(nrgba2, img2)

				c.Assert(goldenEqual(nrgba1, nrgba2), qt.Equals, true, qt.Commentf(e1.Name()))
			}
		}()
	}
	return c
}
// goldenEqual compares two NRGBA images. It is used in golden tests only.
// A small tolerance is allowed on architectures using "fused multiply and add"
// (FMA) instruction to accommodate for floating-point rounding differences
// with control golden images that were generated on amd64 architecture.
// See https://golang.org/ref/spec#Floating_point_operators
// and https://github.com/gohugoio/hugo/issues/6387 for more information.
//
// Based on https://github.com/disintegration/gift/blob/a999ff8d5226e5ab14b64a94fca07c4ac3f357cf/gift_test.go#L598-L625
// Copyright (c) 2014-2019 Grigory Dryapak
// Licensed under the MIT License.
func goldenEqual(img1, img2 *image.NRGBA) bool {
maxDiff := 0
if runtime.GOARCH != "amd64" {
// The golden files are created using the AMD64 architecture.
// Be lenient on other platforms due to floaging point and dithering differences.
maxDiff = 15
}
if !img1.Rect.Eq(img2.Rect) {
return false
}
if len(img1.Pix) != len(img2.Pix) {
return false
}
for i := range img1.Pix {
diff := int(img1.Pix[i]) - int(img2.Pix[i])
if diff < 0 {
diff = -diff
}
if diff > maxDiff {
return false
}
}
return true
}
// SkipGoldenTests is true on architectures where we don't have a CI test
// environment, and where known dithering issues make the golden files time
// consuming to maintain.
var SkipGoldenTests = runtime.GOARCH == "ppc64" || runtime.GOARCH == "ppc64le" || runtime.GOARCH == "s390x"
// UsesFMA indicates whether the "fused multiply and add" (FMA) instruction is
// used on this architecture. The command "grep FMADD go/test/codegen/floats.go"
// can help keep the FMA-using architecture list updated.
var UsesFMA = runtime.GOARCH == "s390x" ||
	runtime.GOARCH == "ppc64" ||
	runtime.GOARCH == "ppc64le" ||
	runtime.GOARCH == "arm64" ||
	runtime.GOARCH == "riscv64" ||
	runtime.GOARCH == "loong64"
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/frontmatter_test.go | parser/frontmatter_test.go | // Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"bytes"
"reflect"
"testing"
"github.com/gohugoio/hugo/parser/metadecoders"
)
// TestInterfaceToConfig verifies that InterfaceToConfig serializes maps to
// TOML, YAML and JSON as expected, and that nil input and unknown formats
// produce errors.
func TestInterfaceToConfig(t *testing.T) {
	cases := []struct {
		input  any
		format metadecoders.Format
		want   []byte
		isErr  bool
	}{
		// TOML
		{map[string]any{}, metadecoders.TOML, nil, false},
		{
			map[string]any{"title": "test' 1"},
			metadecoders.TOML,
			[]byte("title = \"test' 1\"\n"),
			false,
		},

		// YAML
		{map[string]any{}, metadecoders.YAML, []byte("{}\n"), false},
		{
			map[string]any{"title": "test 1"},
			metadecoders.YAML,
			[]byte("title: test 1\n"),
			false,
		},

		// JSON
		{map[string]any{}, metadecoders.JSON, []byte("{}\n"), false},
		{
			map[string]any{"title": "test 1"},
			metadecoders.JSON,
			[]byte("{\n \"title\": \"test 1\"\n}\n"),
			false,
		},

		// Errors
		{nil, metadecoders.TOML, nil, true},
		{map[string]any{}, "foo", nil, true},
	}

	for i, c := range cases {
		var buf bytes.Buffer

		err := InterfaceToConfig(c.input, c.format, &buf)
		if err != nil {
			if c.isErr {
				continue
			}
			t.Fatalf("[%d] unexpected error value: %v", i, err)
		}
		// Previously a case expecting an error would silently pass when no
		// error was returned; fail explicitly instead.
		if c.isErr {
			t.Fatalf("[%d] expected error, got none", i)
		}

		if !reflect.DeepEqual(buf.Bytes(), c.want) {
			t.Errorf("[%d] not equal:\nwant %q,\n got %q", i, c.want, buf.Bytes())
		}
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/lowercase_camel_json_test.go | parser/lowercase_camel_json_test.go | package parser
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestReplacingJSONMarshaller checks that object keys are lowercased and that
// all zero values are dropped when OmitEmpty is set.
func TestReplacingJSONMarshaller(t *testing.T) {
	c := qt.New(t)

	input := map[string]any{
		"foo":        "bar",
		"baz":        42,
		"zeroInt1":   0,
		"zeroInt2":   0,
		"zeroFloat":  0.0,
		"zeroString": "",
		"zeroBool":   false,
		"nil":        nil,
	}

	got, err := ReplacingJSONMarshaller{
		Value:       input,
		KeysToLower: true,
		OmitEmpty:   true,
	}.MarshalJSON()

	c.Assert(err, qt.IsNil)
	c.Assert(string(got), qt.Equals, `{"baz":42,"foo":"bar"}`)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/lowercase_camel_json.go | parser/lowercase_camel_json.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"bytes"
"encoding/json"
"regexp"
"unicode"
"unicode/utf8"
"github.com/gohugoio/hugo/common/hreflect"
)
// Regexp definitions
var (
	// keyMatchRegex matches a JSON object key including the trailing colon,
	// e.g. `"title":`.
	keyMatchRegex = regexp.MustCompile(`\"(\w+)\":`)
	// nullEnableBoolRegex matches null-valued keys named enable*, so they can
	// be rewritten to explicit booleans, e.g. `"enableEmoji":null`.
	nullEnableBoolRegex = regexp.MustCompile(`\"(enable\w+)\":null`)
)
// NullBoolJSONMarshaller wraps a json.Marshaler and rewrites any null-valued
// "enable*" keys in its output to explicit false values.
type NullBoolJSONMarshaller struct {
	Wrapped json.Marshaler
}
// MarshalJSON delegates to the wrapped marshaller and then replaces every
// `"enableXxx":null` in its output with `"enableXxx": false`.
func (c NullBoolJSONMarshaller) MarshalJSON() ([]byte, error) {
	out, err := c.Wrapped.MarshalJSON()
	if err != nil {
		return nil, err
	}
	out = nullEnableBoolRegex.ReplaceAll(out, []byte(`"$1": false`))
	return out, nil
}
// Code adapted from https://gist.github.com/piersy/b9934790a8892db1a603820c0c23e4a7

// LowerCaseCamelJSONMarshaller marshals its Value to JSON with the first
// letter of every object key lower-cased (e.g. "Title" becomes "title").
type LowerCaseCamelJSONMarshaller struct {
	Value any
}
// preserveUpperCaseKey reports whether the quoted key in match (for example
// `"HTTPOnly":`) should keep its upper case first letter.
//
// A plain prefix test replaces the previous `^"HTTP` regexp; the two are
// equivalent, and this avoids invoking the regexp engine for every key.
func preserveUpperCaseKey(match []byte) bool {
	return bytes.HasPrefix(match, []byte(`"HTTP`))
}
// MarshalJSON marshals the wrapped value and lower-cases the first letter of
// every object key. All-caps keys (XML, JSON, ...) are lower-cased entirely,
// and keys starting with "HTTP" are left untouched.
func (c LowerCaseCamelJSONMarshaller) MarshalJSON() ([]byte, error) {
	encoded, err := json.Marshal(c.Value)

	lowerKey := func(match []byte) []byte {
		// Attributes on the form XML, JSON etc.
		if bytes.Equal(match, bytes.ToUpper(match)) {
			return bytes.ToLower(match)
		}

		// Empty keys are valid JSON, only lowercase if we do not have an
		// empty key.
		if len(match) > 2 && !preserveUpperCaseKey(match) {
			// Lower the first rune after the opening quote in place. Keys
			// match \w+, so the rune is ASCII and the re-encoded rune has
			// the same width.
			r, width := utf8.DecodeRune(match[1:])
			r = unicode.ToLower(r)
			utf8.EncodeRune(match[1:width+1], r)
		}
		return match
	}

	return keyMatchRegex.ReplaceAllFunc(encoded, lowerKey), err
}
// ReplacingJSONMarshaller marshals Value to JSON and post-processes the
// output according to the option fields.
type ReplacingJSONMarshaller struct {
	// Value is the value to be marshalled.
	Value any
	// KeysToLower lower-cases all object keys in the output.
	KeysToLower bool
	// OmitEmpty recursively removes all zero values (0, "", false, nil).
	OmitEmpty   bool
}
// MarshalJSON implements json.Marshaler.
//
// It marshals c.Value and then applies the configured transformations:
// lower-casing of object keys and/or recursive removal of zero values.
func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
	converted, err := json.Marshal(c.Value)
	if err != nil {
		// Fail fast. Previously a marshal error was dragged through the
		// transformations below before surfacing (and could be masked by a
		// later unmarshal error).
		return nil, err
	}

	if c.KeysToLower {
		converted = keyMatchRegex.ReplaceAllFunc(
			converted,
			func(match []byte) []byte {
				return bytes.ToLower(match)
			},
		)
	}

	if c.OmitEmpty {
		// It's tricky to do this with a regexp, so convert it to a map, remove zero values and convert back.
		var m map[string]any
		err = json.Unmarshal(converted, &m)
		if err != nil {
			return nil, err
		}
		// removeZeroValues recursively deletes entries whose values are
		// neither maps nor truthful (per hreflect.IsTruthful).
		var removeZeroValues func(m map[string]any)
		removeZeroValues = func(m map[string]any) {
			for k, v := range m {
				if !hreflect.IsMap(v) && !hreflect.IsTruthful(v) {
					delete(m, k)
				} else {
					switch vv := v.(type) {
					case map[string]any:
						removeZeroValues(vv)
					case []any:
						for _, vvv := range vv {
							if m, ok := vvv.(map[string]any); ok {
								removeZeroValues(m)
							}
						}
					}
				}
			}
		}
		removeZeroValues(m)
		converted, err = json.Marshal(m)
	}

	return converted, err
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/frontmatter.go | parser/frontmatter.go | // Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package parser
import (
"encoding/json"
"errors"
"io"
"github.com/gohugoio/hugo/parser/metadecoders"
toml "github.com/pelletier/go-toml/v2"
xml "github.com/clbanning/mxj/v2"
)
const (
	// Front matter delimiter lines (with trailing newline) as written to the
	// output: "---" for YAML and "+++" for TOML.
	yamlDelimLf = "---\n"
	tomlDelimLf = "+++\n"
)
// InterfaceToConfig encodes in to w in the given format (YAML, TOML, JSON or
// XML). It returns an error for nil input or an unsupported format.
func InterfaceToConfig(in any, format metadecoders.Format, w io.Writer) error {
	if in == nil {
		return errors.New("input was nil")
	}

	switch format {
	case metadecoders.YAML:
		out, err := metadecoders.MarshalYAML(in)
		if err != nil {
			return err
		}
		_, err = w.Write(out)
		return err

	case metadecoders.TOML:
		// Encode straight to w; indent nested tables for readability.
		enc := toml.NewEncoder(w)
		enc.SetIndentTables(true)
		return enc.Encode(in)

	case metadecoders.JSON:
		out, err := json.MarshalIndent(in, "", " ")
		if err != nil {
			return err
		}
		if _, err = w.Write(out); err != nil {
			return err
		}
		// MarshalIndent emits no trailing newline; add one.
		_, err = w.Write([]byte{'\n'})
		return err

	case metadecoders.XML:
		out, err := xml.AnyXmlIndent(in, "", "\t", "root")
		if err != nil {
			return err
		}
		_, err = w.Write(out)
		return err

	default:
		return errors.New("unsupported Format provided")
	}
}
// InterfaceToFrontMatter encodes in to w as front matter in the given format.
// YAML and TOML output is wrapped in the matching delimiter lines
// ("---" / "+++"); any other format is passed straight to InterfaceToConfig.
func InterfaceToFrontMatter(in any, format metadecoders.Format, w io.Writer) error {
	if in == nil {
		return errors.New("input was nil")
	}

	// YAML and TOML shared two nearly identical branches; pick the delimiter
	// once and handle both in a single wrap-and-encode path.
	var delim string
	switch format {
	case metadecoders.YAML:
		delim = yamlDelimLf
	case metadecoders.TOML:
		delim = tomlDelimLf
	default:
		return InterfaceToConfig(in, format, w)
	}

	if _, err := w.Write([]byte(delim)); err != nil {
		return err
	}
	if err := InterfaceToConfig(in, format, w); err != nil {
		return err
	}
	_, err := w.Write([]byte(delim))
	return err
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/decoder.go | parser/metadecoders/decoder.go | // Copyright 2025 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders
import (
"bytes"
"encoding/csv"
"encoding/json"
"fmt"
"log"
"regexp"
"strconv"
"strings"
"unsafe"
"github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/maps"
"github.com/niklasfasching/go-org/org"
xml "github.com/clbanning/mxj/v2"
yaml "github.com/goccy/go-yaml"
toml "github.com/pelletier/go-toml/v2"
"github.com/spf13/afero"
"github.com/spf13/cast"
)
// Decoder provides some configuration options for the decoders.
// The zero value is not fully usable for CSV; see Default for a Decoder with
// sensible defaults (',' delimiter, "slice" target type).
type Decoder struct {
	// Format specifies a specific format to decode from. If empty or
	// unspecified, it's inferred from the contents or the filename.
	Format string
	// Delimiter is the field delimiter. Used in the CSV decoder. Default is
	// ','.
	Delimiter rune
	// Comment, if not 0, is the comment character. Lines beginning with the
	// Comment character without preceding whitespace are ignored. Used in the
	// CSV decoder.
	Comment rune
	// If true, a quote may appear in an unquoted field and a non-doubled quote
	// may appear in a quoted field. Used in the CSV decoder. Default is false.
	LazyQuotes bool
	// The target data type, either slice or map. Used in the CSV decoder.
	// Default is slice.
	TargetType string
}
// OptionsKey is used in cache keys.
func (d Decoder) OptionsKey() string {
	// Concatenate the options in a fixed field order so that equal
	// configurations always produce equal keys.
	return d.Format +
		string(d.Delimiter) +
		string(d.Comment) +
		strconv.FormatBool(d.LazyQuotes) +
		d.TargetType
}
// Default is a Decoder in its default configuration:
// ',' as the CSV delimiter and "slice" as the CSV target type.
var Default = Decoder{
	Delimiter:  ',',
	TargetType: "slice",
}
// UnmarshalToMap will unmarshall data in format f into a new map. This is
// what's needed for Hugo's front matter decoding.
func (d Decoder) UnmarshalToMap(data []byte, f Format) (map[string]any, error) {
	result := make(map[string]any)
	if data == nil {
		return result, nil
	}

	err := d.UnmarshalTo(data, f, &result)

	// We migrated to github.com/goccy/go-yaml in v0.152.0,
	// which produces nil maps for empty YAML files (and empty map nodes), unlike gopkg.in/yaml.v2.
	//
	// To prevent crashes when trying to handle empty config files etc., we ensure we always return a non-nil map here.
	// See issue 14074.
	if result == nil {
		result = make(map[string]any)
	}

	return result, err
}
// UnmarshalFileToMap is the same as UnmarshalToMap, but reads the data from
// the given filename.
func (d Decoder) UnmarshalFileToMap(fs afero.Fs, filename string) (map[string]any, error) {
	// The format is inferred from the file extension.
	f := FormatFromString(filename)
	if f == "" {
		return nil, fmt.Errorf("%q is not a valid configuration format", filename)
	}

	b, err := afero.ReadFile(fs, filename)
	if err != nil {
		return nil, err
	}
	return d.UnmarshalToMap(b, f)
}
// UnmarshalStringTo tries to unmarshal data to a new instance of type typ.
func (d Decoder) UnmarshalStringTo(data string, typ any) (any, error) {
	data = strings.TrimSpace(data)
	// We only check for the possible types in YAML, JSON and TOML.
	switch typ.(type) {
	case string:
		return data, nil
	case map[string]any, maps.Params:
		f := d.FormatFromContentString(data)
		return d.UnmarshalToMap([]byte(data), f)
	case []any:
		// A standalone slice. Let YAML handle it.
		return d.Unmarshal([]byte(data), YAML)
	case bool:
		return cast.ToBoolE(data)
	case int:
		return cast.ToIntE(data)
	case int64:
		return cast.ToInt64E(data)
	case float64:
		return cast.ToFloat64E(data)
	}
	return nil, fmt.Errorf("unmarshal: %T not supported", typ)
}
// Unmarshal will unmarshall data in format f into an interface{}.
// This is what's needed for Hugo's /data handling.
func (d Decoder) Unmarshal(data []byte, f Format) (any, error) {
	if len(data) == 0 {
		// Empty input: return the empty value matching the configured
		// target shape.
		if f == CSV {
			switch d.TargetType {
			case "map":
				return make(map[string]any), nil
			case "slice":
				return make([][]string, 0), nil
			}
			return nil, fmt.Errorf("invalid targetType: expected either slice or map, received %s", d.TargetType)
		}
		return make(map[string]any), nil
	}

	var v any
	err := d.UnmarshalTo(data, f, &v)
	return v, err
}
// UnmarshalYaml unmarshals data in YAML format into v.
func UnmarshalYaml(data []byte, v any) error {
	if err := yaml.Unmarshal(data, v); err != nil {
		return err
	}
	// Guard against alias-bomb style documents; the allowed number of
	// aliases scales with the input size.
	limit := calculateCollectionAliasLimit(len(data))
	return validateAliasLimitForCollections(v, limit)
}
// The Billion Laughs YAML example is about 500 bytes in size,
// but even halving that when converted to JSON would produce a file of about 4 MB in size,
// which, when repeated enough times, could be disruptive.
// For large data files where every row shares a common map via aliases,
// a large number of aliases could make sense.
// The primary goal here is to catch the small but malicious files.
func calculateCollectionAliasLimit(sizeInBytes int) int {
	// Work in whole KiB, treating anything below 1 KiB as 1 KiB.
	kb := sizeInBytes / 1024
	if kb == 0 {
		kb = 1
	}

	// The numbers below are somewhat arbitrary, but should provide
	// a reasonable trade-off between safety and usability.
	switch {
	case kb < 2:
		// This should allow at most "thousand laughs",
		// which should be plenty of room for legitimate uses.
		return 100
	case kb < 10:
		return 5000
	default:
		return 10000
	}
}
// unmarshalYamlNoValidation is UnmarshalYaml without the alias-limit
// validation. Used in benchmarks.
func unmarshalYamlNoValidation(data []byte, v any) error {
	// The previous if err := ...; err != nil { return err }; return nil
	// dance collapses to a direct return.
	return yaml.Unmarshal(data, v)
}
// See https://github.com/goccy/go-yaml/issues/461
// While it's true that yaml.Unmarshal isn't vulnerable to the Billion Laughs attack,
// we can easily get a delayed laughter when we try to render this very big structure later,
// e.g. via RenderString.
//
// validateAliasLimitForCollections walks the decoded structure and errors out
// once the number of visited non-scalar (map/slice) nodes exceeds limit.
// A limit <= 0 falls back to 1000.
func validateAliasLimitForCollections(v any, limit int) error {
	if limit <= 0 {
		limit = 1000
	}
	collectionRefCounts := make(map[uintptr]int)
	checkCollectionRef := func(v *any) error {
		// Conversion of a Pointer to a uintptr (but not back to Pointer) is considered safe.
		// See https://pkg.go.dev/unsafe#pkg-functions
		// NOTE(review): this takes the address of the local interface
		// variable of the current validate call, not of the underlying
		// map/slice value — confirm that this keys the counts as intended
		// for detecting aliased nodes.
		ptr := uintptr(unsafe.Pointer(v))
		if ptr == 0 {
			return nil
		}
		collectionRefCounts[ptr]++
		if collectionRefCounts[ptr] > limit {
			return fmt.Errorf("too many YAML aliases for non-scalar nodes")
		}
		return nil
	}
	// validate recursively visits maps and slices, counting each collection
	// visit and descending into the contained values.
	var validate func(v any) error
	validate = func(v any) error {
		switch vv := v.(type) {
		case *map[string]any:
			if err := checkCollectionRef(&v); err != nil {
				return err
			}
			for _, vvv := range *vv {
				if err := validate(vvv); err != nil {
					return err
				}
			}
		case map[string]any:
			if err := checkCollectionRef(&v); err != nil {
				return err
			}
			for _, vvv := range vv {
				if err := validate(vvv); err != nil {
					return err
				}
			}
		case []any:
			if err := checkCollectionRef(&v); err != nil {
				return err
			}
			for _, vvv := range vv {
				if err := validate(vvv); err != nil {
					return err
				}
			}
		case *any:
			// Unwrap pointer-to-interface nodes.
			return validate(*vv)
		default:
			// Scalar: nothing to count.
		}
		return nil
	}
	return validate(v)
}
// UnmarshalTo unmarshals data in format f into v.
//
// For XML, only *map[string]any and *any targets are populated; the document
// is reduced to the contents of its root element, which must be a map.
func (d Decoder) UnmarshalTo(data []byte, f Format, v any) error {
	var err error

	switch f {
	case ORG:
		err = d.unmarshalORG(data, v)
	case JSON:
		err = json.Unmarshal(data, v)
	case XML:
		var xmlRoot xml.Map
		xmlRoot, err = xml.NewMapXml(data)

		var xmlValue map[string]any
		if err == nil {
			// Note: this err deliberately shadows the outer one; failures
			// in this branch return immediately.
			xmlRootName, err := xmlRoot.Root()
			if err != nil {
				return toFileError(f, data, fmt.Errorf("failed to unmarshal XML: %w", err))
			}

			// Get the root value and verify it's a map
			rootValue := xmlRoot[xmlRootName]
			if rootValue == nil {
				return toFileError(f, data, fmt.Errorf("XML root element '%s' has no value", xmlRootName))
			}

			// Type check before conversion
			mapValue, ok := rootValue.(map[string]any)
			if !ok {
				return toFileError(f, data, fmt.Errorf("XML root element '%s' must be a map/object, got %T", xmlRootName, rootValue))
			}
			xmlValue = mapValue
		}

		switch v := v.(type) {
		case *map[string]any:
			*v = xmlValue
		case *any:
			*v = xmlValue
		}
	case TOML:
		err = toml.Unmarshal(data, v)
	case YAML:
		// UnmarshalYaml wraps its own validation; return its result directly.
		return UnmarshalYaml(data, v)
	case CSV:
		return d.unmarshalCSV(data, v)

	default:
		return fmt.Errorf("unmarshal of format %q is not supported", f)
	}

	if err == nil {
		return nil
	}

	// Wrap decode errors with the offending content for error reporting.
	return toFileError(f, data, fmt.Errorf("unmarshal failed: %w", err))
}
// unmarshalCSV parses data as CSV (honouring the Decoder's delimiter, comment
// and quoting options) into *v, which must be a *any. The result is either
// [][]string (TargetType "slice") or []map[string]string keyed by the header
// row (TargetType "map").
func (d Decoder) unmarshalCSV(data []byte, v any) error {
	reader := csv.NewReader(bytes.NewReader(data))
	reader.Comma = d.Delimiter
	reader.Comment = d.Comment
	reader.LazyQuotes = d.LazyQuotes

	records, err := reader.ReadAll()
	if err != nil {
		return err
	}

	target, ok := v.(*any)
	if !ok {
		return fmt.Errorf("cannot unmarshal CSV into %T", v)
	}

	switch d.TargetType {
	case "slice":
		*target = records
	case "map":
		if len(records) < 2 {
			return fmt.Errorf("cannot unmarshal CSV into %T: expected at least a header row and one data row", v)
		}
		header := records[0]
		// Reject duplicate column names; they would silently overwrite
		// each other in the row maps.
		seen := make(map[string]bool, len(header))
		for _, name := range header {
			if seen[name] {
				return fmt.Errorf("cannot unmarshal CSV into %T: header row contains duplicate field names", v)
			}
			seen[name] = true
		}
		rows := make([]map[string]string, len(records)-1)
		for i, record := range records[1:] {
			row := make(map[string]string, len(header))
			for j, col := range record {
				row[header[j]] = col
			}
			rows[i] = row
		}
		*target = rows
	default:
		return fmt.Errorf("cannot unmarshal CSV into %T: invalid targetType: expected either slice or map, received %s", v, d.TargetType)
	}
	return nil
}
// orgDateRe extracts the date part of an Org mode timestamp such as
// "<2020-06-26 Fri>" or "[2020-06-26 Fri]".
// Compiled once at package level instead of on every parseORGDate call.
var orgDateRe = regexp.MustCompile(`[<\[](\d{4}-\d{2}-\d{2}) .*[>\]]`)

// parseORGDate returns the YYYY-MM-DD part of an Org mode timestamp in s, or
// s unchanged when it doesn't contain one.
func parseORGDate(s string) string {
	if m := orgDateRe.FindStringSubmatch(s); m != nil {
		return m[1]
	}
	return s
}
// unmarshalORG parses data as an Org mode document and extracts its buffer
// settings (#+KEY: value lines) into *v as a front matter map.
func (d Decoder) unmarshalORG(data []byte, v any) error {
	config := org.New()
	config.Log = log.Default() // TODO(bep)

	document := config.Parse(bytes.NewReader(data), "")
	if err := document.Error; err != nil {
		return err
	}

	frontMatter := make(map[string]any, len(document.BufferSettings))
	for key, value := range document.BufferSettings {
		key = strings.ToLower(key)
		switch {
		case strings.HasSuffix(key, "[]"):
			// Accumulated keyword (#+KEY[]:): split on whitespace.
			frontMatter[key[:len(key)-2]] = strings.Fields(value)
		case strings.Contains(value, "\n"):
			frontMatter[key] = strings.Split(value, "\n")
		case key == "filetags":
			// Tags are colon-delimited, e.g. ":work:fun:".
			trimmed := strings.TrimPrefix(value, ":")
			trimmed = strings.TrimSuffix(trimmed, ":")
			frontMatter[key] = strings.Split(trimmed, ":")
		case key == "date", key == "lastmod", key == "publishdate", key == "expirydate":
			frontMatter[key] = parseORGDate(value)
		default:
			frontMatter[key] = value
		}
	}

	switch vv := v.(type) {
	case *map[string]any:
		*vv = frontMatter
	case *any:
		*vv = frontMatter
	}

	return nil
}
// toFileError wraps err as a file error with a synthetic filename derived
// from the format, attaching the offending content for error reporting.
func toFileError(f Format, data []byte, err error) error {
	name := fmt.Sprintf("_stream.%s", f)
	fe := herrors.NewFileErrorFromName(err, name)
	return fe.UpdateContent(bytes.NewReader(data), nil)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/format.go | parser/metadecoders/format.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders
import (
"path/filepath"
"strings"
)
// Format identifies a supported metadata/data format, e.g. "yaml" or "json".
type Format string

const (
	// These are the supported metadata formats in Hugo. Most of these are also
	// supported as /data formats.
	ORG  Format = "org"
	JSON Format = "json"
	TOML Format = "toml"
	YAML Format = "yaml"
	CSV  Format = "csv"
	XML  Format = "xml"
)
// FormatFromStrings returns the first non-empty Format from the given strings.
func FormatFromStrings(ss ...string) Format {
	for _, s := range ss {
		f := FormatFromString(s)
		if f != "" {
			return f
		}
	}
	return ""
}
// FormatFromString turns formatStr, typically a file extension without any ".",
// into a Format. It returns an empty string for unknown formats.
func FormatFromString(formatStr string) Format {
	formatStr = strings.ToLower(formatStr)
	if strings.Contains(formatStr, ".") {
		// Assume a filename; use its extension.
		formatStr = strings.TrimPrefix(filepath.Ext(formatStr), ".")
	}

	switch formatStr {
	case "json":
		return JSON
	case "toml":
		return TOML
	case "yaml", "yml":
		return YAML
	case "org":
		return ORG
	case "csv":
		return CSV
	case "xml":
		return XML
	default:
		return ""
	}
}
// FormatFromContentString tries to detect the format (CSV, JSON, YAML, XML or
// TOML) in the given string.
// It returns an empty string if no format could be detected.
func (d Decoder) FormatFromContentString(data string) Format {
	// Find the first occurrence of each format's signature character; the
	// format whose signature appears first wins.
	var (
		csvIdx  = strings.IndexRune(data, d.Delimiter)
		jsonIdx = strings.Index(data, "{")
		yamlIdx = strings.Index(data, ":")
		xmlIdx  = strings.Index(data, "<")
		tomlIdx = strings.Index(data, "=")
	)

	switch {
	case isLowerIndexThan(csvIdx, jsonIdx, yamlIdx, xmlIdx, tomlIdx):
		return CSV
	case isLowerIndexThan(jsonIdx, yamlIdx, xmlIdx, tomlIdx):
		return JSON
	case isLowerIndexThan(yamlIdx, xmlIdx, tomlIdx):
		return YAML
	case isLowerIndexThan(xmlIdx, tomlIdx):
		return XML
	case tomlIdx != -1:
		return TOML
	}
	return ""
}
// isLowerIndexThan reports whether first is a found index (not -1) and none
// of the found indexes in others is strictly smaller than it.
func isLowerIndexThan(first int, others ...int) bool {
	if first == -1 {
		return false
	}
	for _, candidate := range others {
		switch {
		case candidate == -1:
			// Not present; ignore.
		case candidate < first:
			return false
		}
	}
	return true
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/decoder_integration_test.go | parser/metadecoders/decoder_integration_test.go | // Copyright 2025 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders_test
import (
"testing"
"github.com/gohugoio/hugo/hugolib"
)
// TestYAMLIntegerSortIssue14078 verifies that integer values decoded from
// YAML data files can be sorted with the sort template function.
// See issue 14078.
func TestYAMLIntegerSortIssue14078(t *testing.T) {
	files := `
-- assets/mydata.yaml --
a:
  weight: 1
x:
  weight: 2
c:
  weight: 3
t:
  weight: 4
-- layouts/all.html --
{{ $mydata := resources.Get "mydata.yaml" | transform.Unmarshal }}
Type: {{ printf "%T" $mydata.a.weight }}|
Sorted: {{ sort $mydata "weight" }}|
`
	b := hugolib.Test(t, files)

	b.AssertFileContent("public/index.html", "Sorted: [map[weight:1] map[weight:2] map[weight:3] map[weight:4]]|")
}
// TestYAMLIntegerWhere verifies that integers decoded from YAML work with the
// where template function's comparison operators ("ge" here).
func TestYAMLIntegerWhere(t *testing.T) {
	files := `
-- assets/mydata.yaml --
a:
  weight: 1
x:
  weight: 2
c:
  weight: 3
t:
  weight: 4
-- assets/myslice.yaml --
- weight: 1
  name: one
- weight: 2
  name: two
- weight: 3
  name: three
- weight: 4
  name: four
-- layouts/all.html --
{{ $mydata1 := resources.Get "mydata.yaml" | transform.Unmarshal }}
{{ $myslice := resources.Get "myslice.yaml" | transform.Unmarshal }}
{{ $filtered := where $myslice "weight" "ge" $mydata1.x.weight }}
mydata1: {{ $mydata1 }}|
Filtered: {{ $filtered }}|
`
	b := hugolib.Test(t, files)

	b.AssertFileContent("public/index.html", "[map[name:two weight:2] map[name:three weight:3] map[name:four weight:4]]|")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/format_test.go | parser/metadecoders/format_test.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestFormatFromString checks the mapping from extensions/format names
// (including filenames and mixed case) to Format values.
func TestFormatFromString(t *testing.T) {
	c := qt.New(t)

	cases := []struct {
		s      string
		expect Format
	}{
		{"json", JSON},
		{"yaml", YAML},
		{"yml", YAML},
		{"xml", XML},
		{"toml", TOML},
		{"config.toml", TOML},
		{"tOMl", TOML},
		{"org", ORG},
		{"foo", ""},
	}

	for _, test := range cases {
		c.Assert(FormatFromString(test.s), qt.Equals, test.expect)
	}
}
// TestFormatFromContentString checks content sniffing for TOML, YAML, JSON,
// CSV and XML, plus undetectable input.
func TestFormatFromContentString(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	cases := []struct {
		data   string
		expect any
	}{
		{`foo = "bar"`, TOML},
		{` foo = "bar"`, TOML},
		{`foo="bar"`, TOML},
		{`foo: "bar"`, YAML},
		{`foo:"bar"`, YAML},
		{`{ "foo": "bar"`, JSON},
		{`a,b,c"`, CSV},
		{`<foo>bar</foo>"`, XML},
		{`asdfasdf`, Format("")},
		{``, Format("")},
	}

	for i, test := range cases {
		errMsg := qt.Commentf("[%d] %s", i, test.data)
		got := Default.FormatFromContentString(test.data)
		c.Assert(got, qt.Equals, test.expect, errMsg)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/decoder_test.go | parser/metadecoders/decoder_test.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestUnmarshalXML verifies XML decoding: the document is reduced to the
// contents of its root element, attributes are prefixed with "-", and
// repeated elements become slices.
func TestUnmarshalXML(t *testing.T) {
	c := qt.New(t)

	xmlDoc := `<?xml version="1.0" encoding="utf-8" standalone="yes"?>
<rss version="2.0"
	xmlns:atom="http://www.w3.org/2005/Atom">
	<channel>
		<title>Example feed</title>
		<link>https://example.com/</link>
		<description>Example feed</description>
		<generator>Hugo -- gohugo.io</generator>
		<language>en-us</language>
		<copyright>Example</copyright>
		<lastBuildDate>Fri, 08 Jan 2021 14:44:10 +0000</lastBuildDate>
		<atom:link href="https://example.com/feed.xml" rel="self" type="application/rss+xml"/>
		<item>
			<title>Example title</title>
			<link>https://example.com/2021/11/30/example-title/</link>
			<pubDate>Tue, 30 Nov 2021 15:00:00 +0000</pubDate>
			<guid>https://example.com/2021/11/30/example-title/</guid>
			<description>Example description</description>
		</item>
	</channel>
</rss>`

	expect := map[string]any{
		"-atom": "http://www.w3.org/2005/Atom", "-version": "2.0",
		"channel": map[string]any{
			"copyright":   "Example",
			"description": "Example feed",
			"generator":   "Hugo -- gohugo.io",
			"item": map[string]any{
				"description": "Example description",
				"guid":        "https://example.com/2021/11/30/example-title/",
				"link":        "https://example.com/2021/11/30/example-title/",
				"pubDate":     "Tue, 30 Nov 2021 15:00:00 +0000",
				"title":       "Example title",
			},
			"language":      "en-us",
			"lastBuildDate": "Fri, 08 Jan 2021 14:44:10 +0000",
			"link": []any{"https://example.com/", map[string]any{
				"-href": "https://example.com/feed.xml",
				"-rel":  "self",
				"-type": "application/rss+xml",
			}},
			"title": "Example feed",
		},
	}

	d := Default

	m, err := d.Unmarshal([]byte(xmlDoc), XML)
	c.Assert(err, qt.IsNil)
	c.Assert(m, qt.DeepEquals, expect)
}
// TestUnmarshalToMap checks Default.UnmarshalToMap across every supported
// format, including YAML key stringification and the expected error cases
// (malformed TOML, CSV which is not map-shaped, XML without a map root).
func TestUnmarshalToMap(t *testing.T) {
	c := qt.New(t)

	want := map[string]any{"a": "b"}
	dec := Default

	for i, tc := range []struct {
		data   string
		format Format
		expect any
	}{
		{`a = "b"`, TOML, want},
		{`a: "b"`, YAML, want},
		// Make sure we get all string keys, even for YAML
		{"a: Easy!\nb:\n c: 2\n d: [3, 4]", YAML, map[string]any{"a": "Easy!", "b": map[string]any{"c": uint64(2), "d": []any{uint64(3), uint64(4)}}}},
		{"a:\n true: 1\n false: 2", YAML, map[string]any{"a": map[string]any{"true": uint64(1), "false": uint64(2)}}},
		{`{ "a": "b" }`, JSON, want},
		{`<root><a>b</a></root>`, XML, want},
		{`#+a: b`, ORG, want},
		// errors
		{`a = b`, TOML, false},
		{`a,b,c`, CSV, false}, // Use Unmarshal for CSV
		{`<root>just a string</root>`, XML, false},
	} {
		comment := qt.Commentf("%d: %s", i, tc.format)
		got, err := dec.UnmarshalToMap([]byte(tc.data), tc.format)
		// expect == false marks a case that must fail.
		if wantErr, isBool := tc.expect.(bool); isBool && !wantErr {
			c.Assert(err, qt.Not(qt.IsNil), comment)
			continue
		}
		c.Assert(err, qt.IsNil, comment)
		c.Assert(got, qt.DeepEquals, tc.expect, comment)
	}
}
// TestUnmarshalToInterface verifies Unmarshal across all supported formats
// (JSON, ORG, TOML, YAML, XML, CSV), including ORG keyword handling (dates,
// file tags, repeated keywords) and one expected TOML error case.
func TestUnmarshalToInterface(t *testing.T) {
	c := qt.New(t)

	expect := map[string]any{"a": "b"}

	d := Default

	for i, test := range []struct {
		data   []byte
		format Format
		expect any
	}{
		{[]byte(`[ "Brecker", "Blake", "Redman" ]`), JSON, []any{"Brecker", "Blake", "Redman"}},
		{[]byte(`{ "a": "b" }`), JSON, expect},
		// Empty and nil input decodes to an empty map.
		{[]byte(``), JSON, map[string]any{}},
		{[]byte(nil), JSON, map[string]any{}},
		{[]byte(`#+a: b`), ORG, expect},
		// Repeated ORG keywords accumulate into a string slice.
		{[]byte("#+a: foo bar\n#+a: baz"), ORG, map[string]any{"a": []string{string("foo bar"), string("baz")}}},
		// ORG date-like keywords are reduced to the date part.
		{[]byte(`#+DATE: <2020-06-26 Fri>`), ORG, map[string]any{"date": "2020-06-26"}},
		{[]byte(`#+LASTMOD: <2020-06-26 Fri>`), ORG, map[string]any{"lastmod": "2020-06-26"}},
		{[]byte(`#+FILETAGS: :work:`), ORG, map[string]any{"filetags": []string{"work"}}},
		{[]byte(`#+FILETAGS: :work:fun:`), ORG, map[string]any{"filetags": []string{"work", "fun"}}},
		{[]byte(`#+PUBLISHDATE: <2020-06-26 Fri>`), ORG, map[string]any{"publishdate": "2020-06-26"}},
		{[]byte(`#+EXPIRYDATE: <2020-06-26 Fri>`), ORG, map[string]any{"expirydate": "2020-06-26"}},
		{[]byte(`a = "b"`), TOML, expect},
		{[]byte(`a: "b"`), YAML, expect},
		{[]byte(`<root><a>b</a></root>`), XML, expect},
		{[]byte(`a,b,c`), CSV, [][]string{{"a", "b", "c"}}},
		// YAML map keys must come back as strings.
		{[]byte("a: Easy!\nb:\n c: 2\n d: [3, 4]"), YAML, map[string]any{"a": "Easy!", "b": map[string]any{"c": uint64(2), "d": []any{uint64(3), uint64(4)}}}},
		// errors
		{[]byte(`a = "`), TOML, false},
	} {
		msg := qt.Commentf("%d: %s", i, test.format)
		m, err := d.Unmarshal(test.data, test.format)
		// expect == false marks a case that must fail.
		if b, ok := test.expect.(bool); ok && !b {
			c.Assert(err, qt.Not(qt.IsNil), msg)
		} else {
			c.Assert(err, qt.IsNil, msg)
			c.Assert(m, qt.DeepEquals, test.expect, msg)
		}
	}
}
// TestUnmarshalStringTo verifies UnmarshalStringTo converting a string into
// the type of the provided target value: string, map, int64, int, float64,
// and slices of numbers.
func TestUnmarshalStringTo(t *testing.T) {
	c := qt.New(t)
	dec := Default
	wantMap := map[string]any{"a": "b"}

	cases := []struct {
		data   string
		to     any
		expect any
	}{
		{"a string", "string", "a string"},
		{`{ "a": "b" }`, make(map[string]any), wantMap},
		{"32", int64(1234), int64(32)},
		{"32", int(1234), int(32)},
		{"3.14159", float64(1), float64(3.14159)},
		{"[3,7,9]", []any{}, []any{uint64(3), uint64(7), uint64(9)}},
		{"[3.1,7.2,9.3]", []any{}, []any{3.1, 7.2, 9.3}},
	}

	for i, tc := range cases {
		comment := qt.Commentf("%d: %T", i, tc.to)
		got, err := dec.UnmarshalStringTo(tc.data, tc.to)
		// expect == false would mark an error case (none in this table).
		if wantErr, isBool := tc.expect.(bool); isBool && !wantErr {
			c.Assert(err, qt.Not(qt.IsNil), comment)
			continue
		}
		c.Assert(err, qt.IsNil, comment)
		c.Assert(got, qt.DeepEquals, tc.expect, comment)
	}
}
// TestCalculateAliasLimit verifies the step function that picks the YAML
// collection alias limit from the input size in bytes: small inputs get a
// small limit, larger inputs step up to bigger limits.
func TestCalculateAliasLimit(t *testing.T) {
	c := qt.New(t)
	const kb = 1024
	// Up to 1 KB: limit 100.
	c.Assert(calculateCollectionAliasLimit(0), qt.Equals, 100)
	c.Assert(calculateCollectionAliasLimit(500), qt.Equals, 100)
	c.Assert(calculateCollectionAliasLimit(1*kb), qt.Equals, 100)
	// 2 KB through 8 KB: limit 5000.
	c.Assert(calculateCollectionAliasLimit(2*kb), qt.Equals, 5000)
	c.Assert(calculateCollectionAliasLimit(8*kb), qt.Equals, 5000)
	// 12 KB and beyond: limit 10000.
	c.Assert(calculateCollectionAliasLimit(12*kb), qt.Equals, 10000)
	c.Assert(calculateCollectionAliasLimit(10000*kb), qt.Equals, 10000)
}
// BenchmarkDecodeYAMLToMap measures decoding a small YAML document into a
// map via the default decoder.
func BenchmarkDecodeYAMLToMap(b *testing.B) {
	dec := Default
	src := []byte(`
a:
  v1: 32
  v2: 43
  v3: "foo"
b:
  - a
  - b
c: "d"
`)
	for b.Loop() {
		if _, err := dec.UnmarshalToMap(src, YAML); err != nil {
			b.Fatal(err)
		}
	}
}
// BenchmarkUnmarshalBillionLaughs benchmarks YAML unmarshalling with and
// without alias validation, using a "billion laughs" style alias bomb
// (expected to be rejected when validation is on) and a typical front
// matter document (expected to pass either way).
func BenchmarkUnmarshalBillionLaughs(b *testing.B) {
	// Nested anchors/aliases that expand exponentially when resolved.
	yamlBillionLaughs := []byte(`
a: &a [_, _, _, _, _, _, _, _, _, _, _, _, _, _, _]
b: &b [*a, *a, *a, *a, *a, *a, *a, *a, *a, *a]
c: &c [*b, *b, *b, *b, *b, *b, *b, *b, *b, *b]
d: &d [*c, *c, *c, *c, *c, *c, *c, *c, *c, *c]
e: &e [*d, *d, *d, *d, *d, *d, *d, *d, *d, *d]
f: &f [*e, *e, *e, *e, *e, *e, *e, *e, *e, *e]
g: &g [*f, *f, *f, *f, *f, *f, *f, *f, *f, *f]
h: &h [*g, *g, *g, *g, *g, *g, *g, *g, *g, *g]
i: &i [*h, *h, *h, *h, *h, *h, *h, *h, *h, *h]
`)
	// A small, realistic front matter document.
	yamlFrontMatter := []byte(`
title: mysect
tags: [tag1, tag2]
params:
  color: blue
`)
	yamlTests := []struct {
		Title                      string
		Content                    []byte
		IsExpectedToFailValidation bool
	}{
		{"Billion Laughs", yamlBillionLaughs, true},
		{"YAML Front Matter", yamlFrontMatter, false},
	}

	for _, tt := range yamlTests {
		b.Run(tt.Title+" no validation", func(b *testing.B) {
			for b.Loop() {
				var v any
				if err := unmarshalYamlNoValidation(tt.Content, &v); err != nil {
					b.Fatal(err)
				}
			}
		})
		b.Run(tt.Title+" with validation", func(b *testing.B) {
			for b.Loop() {
				var v any
				err := UnmarshalYaml(tt.Content, &v)
				if tt.IsExpectedToFailValidation {
					if err == nil {
						b.Fatal("expected to fail validation but did not")
					}
				} else {
					if err != nil {
						b.Fatal(err)
					}
				}
			}
		})
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/metadecoders/encoder.go | parser/metadecoders/encoder.go | // Copyright 2025 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package metadecoders
import yaml "github.com/goccy/go-yaml"
// yamlEncodeOptions holds the encode options applied on every YAML marshal.
var yamlEncodeOptions = []yaml.EncodeOption{
	yaml.UseSingleQuote(true),
	// This prevents excessively large objects, see https://github.com/goccy/go-yaml/issues/461
	yaml.WithSmartAnchor(),
}

// MarshalYAML marshals the given value to YAML using yamlEncodeOptions.
// Declared as a variable so it can be swapped out (e.g. in tests).
var MarshalYAML = func(v any) ([]byte, error) {
	return yaml.MarshalWithOptions(v, yamlEncodeOptions...)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pageparser_intro_test.go | parser/pageparser/pageparser_intro_test.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"fmt"
"strings"
"testing"
qt "github.com/frankban/quicktest"
)
// lexerTest describes one lexer test case: the input to lex and the expected
// sequence of items (or an expected error).
type lexerTest struct {
	name  string
	input string
	items []typeText
	err   error
}

// typeText pairs an item type with its expected text value.
type typeText struct {
	typ  ItemType
	text string
}

// nti is a shorthand constructor for typeText.
func nti(tp ItemType, val string) typeText {
	return typeText{typ: tp, text: val}
}
// Shared front matter fixtures used by the lexer tests below.
var (
	// tstJSON contains an escaped quote to exercise the JSON front matter lexer.
	tstJSON                = `{ "a": { "b": "\"Hugo\"}" } }`
	tstFrontMatterTOML     = nti(TypeFrontMatterTOML, "foo = \"bar\"\n")
	tstFrontMatterYAML     = nti(TypeFrontMatterYAML, "foo: \"bar\"\n")
	tstFrontMatterYAMLCRLF = nti(TypeFrontMatterYAML, "foo: \"bar\"\r\n")
	tstFrontMatterJSON     = nti(TypeFrontMatterJSON, tstJSON+"\r\n")
	tstSomeText            = nti(tText, "\nSome text.\n")
	tstSummaryDivider      = nti(TypeLeadSummaryDivider, "<!--more-->\n")
	tstNewline             = nti(tText, "\n")
	// tstORG is a minimal Org Mode front matter block; "#+" must be at line start.
	tstORG = `
#+TITLE: T1
#+AUTHOR: A1
#+DESCRIPTION: D1
`
	tstFrontMatterORG = nti(TypeFrontMatterORG, tstORG)
)
// crLfReplacer makes CR/LF visible ("#"/"$") in test failure output.
var crLfReplacer = strings.NewReplacer("\r", "#", "\n", "$")

// TODO(bep) a way to toggle ORG mode vs the rest.

// frontMatterTests covers the intro-section lexer: front matter detection
// for all formats, BOM handling, CRLF handling, and summary dividers.
var frontMatterTests = []lexerTest{
	{"empty", "", []typeText{tstEOF}, nil},
	{"Byte order mark", "\ufeff\nSome text.\n", []typeText{nti(TypeIgnore, "\ufeff"), tstSomeText, tstEOF}, nil},
	{"No front matter", "\nSome text.\n", []typeText{tstSomeText, tstEOF}, nil},
	{"YAML front matter", "---\nfoo: \"bar\"\n---\n\nSome text.\n", []typeText{tstFrontMatterYAML, tstSomeText, tstEOF}, nil},
	{"YAML empty front matter", "---\n---\n\nSome text.\n", []typeText{nti(TypeFrontMatterYAML, ""), tstSomeText, tstEOF}, nil},
	// Note that we keep all bytes as they are, but we need to handle CRLF
	{"YAML front matter CRLF", "---\r\nfoo: \"bar\"\r\n---\n\nSome text.\n", []typeText{tstFrontMatterYAMLCRLF, tstSomeText, tstEOF}, nil},
	{"TOML front matter", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstEOF}, nil},
	{"JSON front matter", tstJSON + "\r\n\nSome text.\n", []typeText{tstFrontMatterJSON, tstSomeText, tstEOF}, nil},
	{"ORG front matter", tstORG + "\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, tstEOF}, nil},
	{"Summary divider ORG", tstORG + "\nSome text.\n# more\nSome text.\n", []typeText{tstFrontMatterORG, tstSomeText, nti(TypeLeadSummaryDivider, "# more\n"), nti(tText, "Some text.\n"), tstEOF}, nil},
	{"Summary divider", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, tstSummaryDivider, nti(tText, "Some text.\n"), tstEOF}, nil},
	{"Summary divider same line", "+++\nfoo = \"bar\"\n+++\n\nSome text.<!--more-->Some text.\n", []typeText{tstFrontMatterTOML, nti(tText, "\nSome text."), nti(TypeLeadSummaryDivider, "<!--more-->"), nti(tText, "Some text.\n"), tstEOF}, nil},
	// https://github.com/gohugoio/hugo/issues/5402
	{"Summary and shortcode, no space", "+++\nfoo = \"bar\"\n+++\n\nSome text.\n<!--more-->{{< sc1 >}}\nSome text.\n", []typeText{tstFrontMatterTOML, tstSomeText, nti(TypeLeadSummaryDivider, "<!--more-->"), tstLeftNoMD, tstSC1, tstRightNoMD, tstSomeText, tstEOF}, nil},
	// https://github.com/gohugoio/hugo/issues/5464
	{"Summary and shortcode only", "+++\nfoo = \"bar\"\n+++\n{{< sc1 >}}\n<!--more-->\n{{< sc2 >}}", []typeText{tstFrontMatterTOML, tstLeftNoMD, tstSC1, tstRightNoMD, tstNewline, tstSummaryDivider, tstLeftNoMD, tstSC2, tstRightNoMD, tstEOF}, nil},
}
// TestFrontMatter runs the intro-section lexer over frontMatterTests and
// compares the emitted item stream against the expected one, rendering both
// sides as strings on mismatch for a readable diff.
func TestFrontMatter(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	for i, tt := range frontMatterTests {
		items, err := collect([]byte(tt.input), false, lexIntroSection)
		if err != nil {
			c.Assert(err, qt.Equals, tt.err)
			continue
		}
		c.Assert(tt.err, qt.IsNil)
		if equal(tt.input, items, tt.items) {
			continue
		}
		got := itemsToString(items, []byte(tt.input))
		expected := testItemsToString(tt.items)
		c.Assert(got, qt.Equals, expected, qt.Commentf("Test %d: %s", i, tt.name))
	}
}
// itemsToString renders lexed items as "type: value" lines separated by
// blank lines, with CR/LF made visible via crLfReplacer for easy diffing.
func itemsToString(items []Item, source []byte) string {
	var b strings.Builder
	last := len(items) - 1
	for i, it := range items {
		var text string
		if it.Err != nil {
			text = it.Err.Error()
		} else {
			text = string(it.Val(source))
		}
		fmt.Fprintf(&b, "%s: %s\n", it.Type, text)
		if i != last {
			b.WriteString("\n")
		}
	}
	return crLfReplacer.Replace(b.String())
}
// testItemsToString renders expected typeText items in the same format as
// itemsToString, so the two can be compared as strings.
func testItemsToString(items []typeText) string {
	var b strings.Builder
	last := len(items) - 1
	for i, it := range items {
		fmt.Fprintf(&b, "%s: %s\n", it.typ, it.text)
		if i != last {
			b.WriteString("\n")
		}
	}
	return crLfReplacer.Replace(b.String())
}
// collectWithConfig runs the page lexer over input, starting in stateStart,
// and collects every emitted item up to and including the terminating
// tEOF/tError item. If the lexer recorded an error it is returned instead
// of the items.
//
// Note: skipFrontMatter is not referenced in this function body.
func collectWithConfig(input []byte, skipFrontMatter bool, stateStart stateFunc, cfg Config) (items []Item, err error) {
	l := newPageLexer(input, stateStart, cfg)
	l.run()

	iter := NewIterator(l.items)

	for {
		// l.err is fixed once run() has returned; checked each iteration
		// so we bail out before consuming further items.
		if l.err != nil {
			return nil, l.err
		}
		item := iter.Next()
		items = append(items, item)
		if item.Type == tEOF || item.Type == tError {
			break
		}
	}
	return
}
// collect runs collectWithConfig with a zero-value Config.
func collect(input []byte, skipFrontMatter bool, stateStart stateFunc) ([]Item, error) {
	return collectWithConfig(input, skipFrontMatter, stateStart, Config{})
}
// collectStringMain lexes the given string starting in the main section,
// skipping front matter handling.
func collectStringMain(input string) ([]Item, error) {
	b := []byte(input)
	return collect(b, true, lexMainSection)
}
// equal reports whether the lexed items match the expected type/text pairs.
// Item positions are not compared, for now ...
func equal(source string, got []Item, expect []typeText) bool {
	if len(got) != len(expect) {
		return false
	}
	src := []byte(source)
	for i, g := range got {
		e := expect[i]
		if g.Type != e.typ {
			return false
		}
		var val string
		if g.Err == nil {
			val = string(g.Val(src))
		} else {
			val = g.Err.Error()
		}
		if val != e.text {
			return false
		}
	}
	return true
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/item_test.go | parser/pageparser/item_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"testing"
qt "github.com/frankban/quicktest"
)
// TestItemValTyped verifies Item.ValTyped's conversion of raw source bytes
// into typed values: floats, ints, bools, and fall-through to string for
// anything else (or when the item is explicitly marked as a string).
func TestItemValTyped(t *testing.T) {
	c := qt.New(t)
	source := []byte("3.14")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, float64(3.14))
	source = []byte(".14")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, float64(0.14))
	source = []byte("314")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, 314)
	// isString forces the raw string even for numeric content.
	source = []byte("314")
	c.Assert(Item{low: 0, high: len(source), isString: true}.ValTyped(source), qt.Equals, "314")
	// Trailing garbage or whitespace keeps the value a string.
	source = []byte("314x")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "314x")
	source = []byte("314 ")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "314 ")
	source = []byte("true")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, true)
	source = []byte("false")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, false)
	// Partial bool matches stay strings.
	source = []byte("falsex")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "falsex")
	source = []byte("xfalse")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "xfalse")
	source = []byte("truex")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "truex")
	source = []byte("xtrue")
	c.Assert(Item{low: 0, high: len(source)}.ValTyped(source), qt.Equals, "xtrue")
}
// TestItemBoolMethods exercises the boolean classification helpers on Item
// (IsText, IsShortcodeName, IsFrontMatter, IsDone, ...) against items of the
// relevant types. The source slice is only consulted by IsNonWhitespace.
func TestItemBoolMethods(t *testing.T) {
	c := qt.New(t)

	// Used by the IsNonWhitespace case below; low/high select "shortcode".
	source := []byte(" shortcode ")

	tests := []struct {
		name   string
		item   Item
		source []byte
		want   bool
		call   func(Item, []byte) bool
	}{
		{
			name: "IsText true",
			item: Item{Type: tText},
			call: func(i Item, _ []byte) bool { return i.IsText() },
			want: true,
		},
		{
			name: "IsIndentation false",
			item: Item{Type: tText},
			call: func(i Item, _ []byte) bool { return i.IsIndentation() },
			want: false,
		},
		{
			name: "IsShortcodeName",
			item: Item{Type: tScName},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeName() },
			want: true,
		},
		{
			name: "IsNonWhitespace true",
			item: Item{
				Type: tText,
				low:  2,
				high: 11,
			},
			source: source,
			call:   func(i Item, src []byte) bool { return i.IsNonWhitespace(src) },
			want:   true,
		},
		{
			name: "IsShortcodeParam false",
			item: Item{Type: tScParamVal},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeParam() },
			want: false,
		},
		{
			name: "IsInlineShortcodeName",
			item: Item{Type: tScNameInline},
			call: func(i Item, _ []byte) bool { return i.IsInlineShortcodeName() },
			want: true,
		},
		{
			name: "IsLeftShortcodeDelim tLeftDelimScWithMarkup",
			item: Item{Type: tLeftDelimScWithMarkup},
			call: func(i Item, _ []byte) bool { return i.IsLeftShortcodeDelim() },
			want: true,
		},
		{
			name: "IsLeftShortcodeDelim tLeftDelimScNoMarkup",
			item: Item{Type: tLeftDelimScNoMarkup},
			call: func(i Item, _ []byte) bool { return i.IsLeftShortcodeDelim() },
			want: true,
		},
		{
			name: "IsRightShortcodeDelim tRightDelimScWithMarkup",
			item: Item{Type: tRightDelimScWithMarkup},
			call: func(i Item, _ []byte) bool { return i.IsRightShortcodeDelim() },
			want: true,
		},
		{
			name: "IsRightShortcodeDelim tRightDelimScNoMarkup",
			item: Item{Type: tRightDelimScNoMarkup},
			call: func(i Item, _ []byte) bool { return i.IsRightShortcodeDelim() },
			want: true,
		},
		{
			name: "IsShortcodeClose",
			item: Item{Type: tScClose},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeClose() },
			want: true,
		},
		{
			name: "IsShortcodeParamVal",
			item: Item{Type: tScParamVal},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeParamVal() },
			want: true,
		},
		{
			name: "IsShortcodeMarkupDelimiter tLeftDelimScWithMarkup",
			item: Item{Type: tLeftDelimScWithMarkup},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeMarkupDelimiter() },
			want: true,
		},
		{
			name: "IsShortcodeMarkupDelimiter tRightDelimScWithMarkup",
			item: Item{Type: tRightDelimScWithMarkup},
			call: func(i Item, _ []byte) bool { return i.IsShortcodeMarkupDelimiter() },
			want: true,
		},
		{
			name: "IsFrontMatter TypeFrontMatterYAML",
			item: Item{Type: TypeFrontMatterYAML},
			call: func(i Item, _ []byte) bool { return i.IsFrontMatter() },
			want: true,
		},
		{
			name: "IsFrontMatter TypeFrontMatterTOML",
			item: Item{Type: TypeFrontMatterTOML},
			call: func(i Item, _ []byte) bool { return i.IsFrontMatter() },
			want: true,
		},
		{
			name: "IsFrontMatter TypeFrontMatterJSON",
			item: Item{Type: TypeFrontMatterJSON},
			call: func(i Item, _ []byte) bool { return i.IsFrontMatter() },
			want: true,
		},
		{
			name: "IsFrontMatter TypeFrontMatterORG",
			item: Item{Type: TypeFrontMatterORG},
			call: func(i Item, _ []byte) bool { return i.IsFrontMatter() },
			want: true,
		},
		{
			name: "IsDone tError",
			item: Item{Type: tError},
			call: func(i Item, _ []byte) bool { return i.IsDone() },
			want: true,
		},
		{
			name: "IsDone tEOF",
			item: Item{Type: tEOF},
			call: func(i Item, _ []byte) bool { return i.IsDone() },
			want: true,
		},
		{
			name: "IsEOF",
			item: Item{Type: tEOF},
			call: func(i Item, _ []byte) bool { return i.IsEOF() },
			want: true,
		},
		{
			name: "IsError",
			item: Item{Type: tError},
			call: func(i Item, _ []byte) bool { return i.IsError() },
			want: true,
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := tt.call(tt.item, tt.source)
			c.Assert(got, qt.Equals, tt.want)
		})
	}
}
// TestItem_ToString verifies Item.ToString's special-cased renderings: EOF,
// error, indentation, and the generic "<value>" form for keyword items,
// including a long (100-byte) value and an empty value.
func TestItem_ToString(t *testing.T) {
	c := qt.New(t)
	source := []byte("src")
	// A 100-byte value with bytes 0..99.
	long := make([]byte, 100)
	for i := range long {
		long[i] = byte(i)
	}

	tests := []struct {
		name   string
		item   Item
		source []byte
		want   string
		call   func(Item, []byte) string
	}{
		{
			name: "EOF",
			item: Item{Type: tEOF},
			call: func(i Item, _ []byte) string { return i.ToString(source) },
			want: "EOF",
		},
		{
			name: "Error",
			item: Item{Type: tError},
			call: func(i Item, _ []byte) string { return i.ToString(source) },
			want: "",
		},
		{
			name: "Indentation",
			item: Item{Type: tIndentation},
			call: func(i Item, _ []byte) string { return i.ToString(source) },
			want: "tIndentation:[]",
		},
		{
			name: "Long",
			item: Item{Type: tKeywordMarker + 1, low: 0, high: 100},
			call: func(i Item, _ []byte) string { return i.ToString(long) },
			want: "<" + string(long) + ">",
		},
		{
			name: "Empty",
			item: Item{Type: tKeywordMarker + 1},
			call: func(i Item, _ []byte) string { return i.ToString([]byte("")) },
			want: "<>",
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := tt.call(tt.item, tt.source)
			c.Assert(got, qt.Equals, tt.want)
		})
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pagelexer_intro.go | parser/pageparser/pagelexer_intro.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
// lexIntroSection scans from the top of the page looking for front matter.
// The first significant character decides the format: '+' TOML, '-' YAML,
// '{' JSON, '#' Org Mode. A byte order mark is emitted as TypeIgnore, and
// any other non-whitespace character ends the intro section.
func lexIntroSection(l *pageLexer) stateFunc {
LOOP:
	for {
		r := l.next()
		if r == eof {
			break
		}

		switch {
		case r == '+':
			return l.lexFrontMatterSection(TypeFrontMatterTOML, r, "TOML", delimTOML)
		case r == '-':
			return l.lexFrontMatterSection(TypeFrontMatterYAML, r, "YAML", delimYAML)
		case r == '{':
			return lexFrontMatterJSON
		case r == '#':
			return lexFrontMatterOrgMode
		case r == byteOrderMark:
			l.emit(TypeIgnore)
		case !isSpace(r) && !isEndOfLine(r):
			// Content starts immediately; no front matter.
			break LOOP
		}
	}

	// Now move on to the shortcodes.
	return lexMainSection
}
// lexFrontMatterJSON lexes a JSON front matter block by tracking curly-brace
// nesting outside of quoted strings until the opening brace is balanced,
// then emits the whole block (including both braces) as one item.
func lexFrontMatterJSON(l *pageLexer) stateFunc {
	// Include the left delimiter
	l.backup()

	var (
		inQuote bool // true while inside a double-quoted string
		level   int  // current brace nesting depth
	)

	for {
		r := l.next()

		switch {
		case r == eof:
			return l.errorf("unexpected EOF parsing JSON front matter")
		case r == '{':
			if !inQuote {
				level++
			}
		case r == '}':
			if !inQuote {
				level--
			}
		case r == '"':
			inQuote = !inQuote
		case r == '\\':
			// This may be an escaped quote. Make sure it's not marked as a
			// real one.
			l.next()
		}

		// Opening brace balanced: the JSON object is complete.
		if level == 0 {
			break
		}
	}

	l.consumeCRLF()
	l.emit(TypeFrontMatterJSON)

	return lexMainSection
}
// lexFrontMatterOrgMode lexes Org Mode front matter: consecutive lines
// starting with "#+". It also switches the summary divider to the Org
// variant for the rest of the page.
func lexFrontMatterOrgMode(l *pageLexer) stateFunc {
	/*
		#+TITLE: Test File For chaseadamsio/goorgeous
		#+AUTHOR: Chase Adams
		#+DESCRIPTION: Just another golang parser for org content!
	*/

	l.backup()

	// Not Org front matter after all; hand over to the main section.
	if !l.hasPrefix(delimOrg) {
		return lexMainSection
	}

	l.summaryDivider = summaryDividerOrg

	// Read lines until we no longer see a #+ prefix
LOOP:
	for {
		r := l.next()

		switch {
		case r == '\n':
			if !l.hasPrefix(delimOrg) {
				break LOOP
			}
		case r == eof:
			break LOOP
		}
	}

	l.emit(TypeFrontMatterORG)

	return lexMainSection
}
// lexFrontMatterSection lexes YAML ("---") or TOML ("+++") front matter.
// The first rune of the opening delimiter has already been consumed; the
// remaining two are verified here. Everything up to a line starting with
// the closing delimiter is emitted as one item of type tp; the delimiters
// themselves are dropped.
func (l *pageLexer) lexFrontMatterSection(tp ItemType, delimr rune, name string, delim []byte) stateFunc {
	// Consume and verify the two remaining runes of the opening delimiter.
	for range 2 {
		if r := l.next(); r != delimr {
			return l.errorf("invalid %s delimiter", name)
		}
	}

	// Let front matter start at line 1
	wasEndOfLine := l.consumeCRLF()
	// We don't care about the delimiters.
	l.ignore()

	var r rune

	for {
		if !wasEndOfLine {
			r = l.next()
			if r == eof {
				return l.errorf("EOF looking for end %s front matter delimiter", name)
			}
		}

		// A closing delimiter is only valid at the start of a line.
		if wasEndOfLine || isEndOfLine(r) {
			if l.hasPrefix(delim) {
				l.emit(tp)
				// Skip the closing delimiter (previously a hard-coded 3).
				l.pos += len(delim)
				l.consumeCRLF()
				l.ignore()
				break
			}
		}

		wasEndOfLine = false
	}

	return lexMainSection
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pagelexer_shortcode.go | parser/pageparser/pagelexer_shortcode.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import "unique"
// lexerShortcodeState holds the lexer state that is specific to shortcode
// parsing, embedded in the page lexer.
type lexerShortcodeState struct {
	currLeftDelimItem  ItemType
	currRightDelimItem ItemType
	isInline           bool
	currShortcodeName  string                         // is only set when a shortcode is in opened state
	closingState       int                            // > 0 = on its way to be closed
	elementStepNum     int                            // step number in element
	paramElements      int                            // number of elements (name + value = 2) found first
	openShortcodes     map[unique.Handle[string]]bool // set of shortcodes in open state
}
// Shortcode syntax delimiters.
var (
	leftDelimSc            = []byte("{{")
	leftDelimScNoMarkup    = []byte("{{<")
	rightDelimScNoMarkup   = []byte(">}}")
	leftDelimScWithMarkup  = []byte("{{%")
	rightDelimScWithMarkup = []byte("%}}")
	leftComment            = []byte("/*") // comments in this context are used to mark shortcodes as "not really a shortcode"
	rightComment           = []byte("*/")
)
// isShortCodeStart reports whether the input at the current position starts
// a shortcode opening delimiter, with or without markup.
func (l *pageLexer) isShortCodeStart() bool {
	if l.hasPrefix(leftDelimScNoMarkup) {
		return true
	}
	return l.hasPrefix(leftDelimScWithMarkup)
}
// lexShortcodeLeftDelim consumes the left shortcode delimiter. If a comment
// marker ("/*") follows, the shortcode is commented out and handled by
// lexShortcodeComment; otherwise the delimiter is emitted and the
// per-shortcode element counters are reset.
func lexShortcodeLeftDelim(l *pageLexer) stateFunc {
	l.pos += len(l.currentLeftShortcodeDelim())
	if l.hasPrefix(leftComment) {
		return lexShortcodeComment
	}
	l.emit(l.currentLeftShortcodeDelimItem())
	l.elementStepNum = 0
	l.paramElements = 0
	return lexInsideShortcode
}
// lexShortcodeComment handles a commented-out shortcode ({{</* ... */>}}):
// the delimiters and inner content are emitted as plain text while the
// comment markers themselves are dropped.
func lexShortcodeComment(l *pageLexer) stateFunc {
	posRightComment := l.index(append(rightComment, l.currentRightShortcodeDelim()...))
	if posRightComment <= 1 {
		return l.errorf("comment must be closed")
	}
	// we emit all as text, except the comment markers
	l.emit(tText) // the left shortcode delimiter
	l.pos += len(leftComment)
	l.ignore() // drop "/*"
	l.pos += posRightComment - len(leftComment)
	l.emit(tText) // the comment body
	l.pos += len(rightComment)
	l.ignore() // drop "*/"
	l.pos += len(l.currentRightShortcodeDelim())
	l.emit(tText) // the right shortcode delimiter
	return lexMainSection
}
// lexShortcodeRightDelim emits the right shortcode delimiter, resets the
// closing state, and returns lexing to the main section.
func lexShortcodeRightDelim(l *pageLexer) stateFunc {
	l.closingState = 0
	l.pos += len(l.currentRightShortcodeDelim())
	l.emit(l.currentRightShortcodeDelimItem())
	return lexMainSection
}
// lexShortcodeParam lexes one shortcode parameter, which can take one of
// these forms:
//  1. param
//  2. "param" or "param\"
//  3. param="123" or param="123\"
//  4. param="Some \"escaped\" text"
//  5. `param`
//  6. param=`123`
//
// It decides between positional and named (name=value) parameters and
// enforces that the two styles are not mixed within one shortcode.
// escapedQuoteStart is true when the parameter started with an escaped quote.
func lexShortcodeParam(l *pageLexer, escapedQuoteStart bool) stateFunc {
	first := true
	nextEq := false

	var r rune

	for {
		r = l.next()
		if first {
			if r == '"' || (r == '`' && !escapedQuoteStart) {
				// a positional param with quotes
				if l.paramElements == 2 {
					return l.errorf("got quoted positional parameter. Cannot mix named and positional parameters")
				}
				l.paramElements = 1
				l.backup()
				if r == '"' {
					return lexShortcodeQuotedParamVal(l, !escapedQuoteStart, tScParam)
				}
				return lexShortCodeParamRawStringVal(l, tScParam)
			} else if r == '`' && escapedQuoteStart {
				return l.errorf("unrecognized escape character")
			}
			first = false
		} else if r == '=' {
			// a named param
			l.backup()
			nextEq = true
			break
		}

		if !isAlphaNumericOrHyphen(r) && r != '.' { // Floats have period
			l.backup()
			break
		}
	}

	// Record whether this shortcode uses positional (1) or named (2)
	// parameters; mixing the two styles is an error.
	if l.paramElements == 0 {
		l.paramElements++

		if nextEq {
			l.paramElements++
		}
	} else {
		if nextEq && l.paramElements == 1 {
			return l.errorf("got named parameter '%s'. Cannot mix named and positional parameters", l.current())
		} else if !nextEq && l.paramElements == 2 {
			return l.errorf("got positional parameter '%s'. Cannot mix named and positional parameters", l.current())
		}
	}

	l.emit(tScParam)
	return lexInsideShortcode
}
// lexShortcodeParamVal lexes an unquoted parameter value, terminated by the
// next space.
func lexShortcodeParamVal(l *pageLexer) stateFunc {
	l.consumeToSpace()
	l.emit(tScParamVal)
	return lexInsideShortcode
}
// lexShortCodeParamRawStringVal lexes a backtick-delimited raw string value
// and emits its content (without the backticks) as typ.
func lexShortCodeParamRawStringVal(l *pageLexer, typ ItemType) stateFunc {
	openBacktickFound := false

Loop:
	for {
		switch r := l.next(); {
		case r == '`':
			if openBacktickFound {
				// Second backtick: end of the raw string.
				l.backup()
				break Loop
			} else {
				openBacktickFound = true
				l.ignore()
			}
		case r == eof:
			return l.errorf("unterminated raw string in shortcode parameter-argument: '%s'", l.current())
		}
	}

	l.emitString(typ)
	// Skip the closing backtick.
	l.next()
	l.ignore()

	return lexInsideShortcode
}
// lexShortcodeQuotedParamVal lexes a double-quoted parameter value and emits
// it as typ. When escapedQuotedValuesAllowed is set (the opening quote was
// written as \"), escaped inner quotes become part of the value; otherwise
// an escaped quote terminates it.
func lexShortcodeQuotedParamVal(l *pageLexer, escapedQuotedValuesAllowed bool, typ ItemType) stateFunc {
	openQuoteFound := false
	escapedInnerQuoteFound := false
	escapedQuoteState := 0 // 1 while the next '"' is a literal (escaped) quote

Loop:
	for {
		switch r := l.next(); {
		case r == '\\':
			if l.peek() == '"' {
				if openQuoteFound && !escapedQuotedValuesAllowed {
					l.backup()
					break Loop
				} else if openQuoteFound {
					// the coming quote is inside
					escapedInnerQuoteFound = true
					escapedQuoteState = 1
				}
			} else if l.peek() == '`' {
				return l.errorf("unrecognized escape character")
			}
		case r == eof, r == '\n':
			return l.errorf("unterminated quoted string in shortcode parameter-argument: '%s'", l.current())
		case r == '"':
			if escapedQuoteState == 0 {
				if openQuoteFound {
					// Closing quote: end of value.
					l.backup()
					break Loop
				} else {
					openQuoteFound = true
					l.ignore()
				}
			} else {
				// This quote was escaped; treat it as content.
				escapedQuoteState = 0
			}
		}
	}

	if escapedInnerQuoteFound {
		// Emit with the escape characters removed from the value.
		l.ignoreEscapesAndEmit(typ, true)
	} else {
		l.emitString(typ)
	}

	r := l.next()

	if r == '\\' {
		if l.peek() == '"' {
			// ignore the escaped closing quote
			l.ignore()
			l.next()
			l.ignore()
		}
	} else if r == '"' {
		// ignore closing quote
		l.ignore()
	} else {
		// handled by next state
		l.backup()
	}

	return lexInsideShortcode
}
// Inline shortcodes have the form {{< myshortcode.inline >}}.
var inlineIdentifier = []byte("inline ")

// lexIdentifierInShortcode scans a shortcode name (alphanumerics and
// hyphens; slashes allowed for namespacing; a period only for ".inline"
// shortcodes) and emits it. For a closing tag it verifies the name matches
// a currently open shortcode.
func lexIdentifierInShortcode(l *pageLexer) stateFunc {
	lookForEnd := false
Loop:
	for {
		switch r := l.next(); {
		case isAlphaNumericOrHyphen(r):
		// Allow forward slash inside names to make it possible to create namespaces.
		case r == '/':
		case r == '.':
			// A period is only valid in "name.inline".
			l.isInline = l.hasPrefix(inlineIdentifier)
			if !l.isInline {
				return l.errorf("period in shortcode name only allowed for inline identifiers")
			}
		default:
			l.backup()
			// Intern the name; it is used below as a key into the set of
			// open shortcodes.
			word := unique.Make(string(l.input[l.start:l.pos]))
			if l.closingState > 0 {
				if !l.openShortcodes[word] {
					return l.errorf("closing tag for shortcode '%s' does not match start tag", word.Value())
				}
				l.openShortcodes[word] = false
				lookForEnd = true
			}
			l.closingState = 0
			l.currShortcodeName = word.Value()
			l.openShortcodes[word] = true
			l.elementStepNum++
			if l.isInline {
				l.emit(tScNameInline)
			} else {
				l.emit(tScName)
			}
			break Loop
		}
	}

	if lookForEnd {
		return lexEndOfShortcode
	}
	return lexInsideShortcode
}
// lexEndOfShortcode expects only whitespace before the closing right
// delimiter of a just-closed shortcode; anything else is an error.
func lexEndOfShortcode(l *pageLexer) stateFunc {
	l.isInline = false
	if l.hasPrefix(l.currentRightShortcodeDelim()) {
		return lexShortcodeRightDelim
	}
	switch r := l.next(); {
	case isSpace(r):
		l.ignore()
	default:
		return l.errorf("unclosed shortcode")
	}
	return lexEndOfShortcode
}
// lexInsideShortcode scans the elements inside shortcode tags: the name,
// parameters and values, closing markers ('/'), and the terminating right
// delimiter.
func lexInsideShortcode(l *pageLexer) stateFunc {
	if l.hasPrefix(l.currentRightShortcodeDelim()) {
		return lexShortcodeRightDelim
	}
	switch r := l.next(); {
	case r == eof:
		// eol is allowed inside shortcodes; this may go to end of document before it fails
		return l.errorf("unclosed shortcode action")
	case isSpace(r), isEndOfLine(r):
		l.ignore()
	case r == '=':
		// A named parameter's value follows; it may be quoted, escaped-quoted,
		// raw (backtick), or bare.
		l.consumeSpace()
		l.ignore()
		peek := l.peek()
		if peek == '"' || peek == '\\' {
			return lexShortcodeQuotedParamVal(l, peek != '\\', tScParamVal)
		} else if peek == '`' {
			return lexShortCodeParamRawStringVal(l, tScParamVal)
		}
		return lexShortcodeParamVal
	case r == '/':
		if l.currShortcodeName == "" {
			return l.errorf("got closing shortcode, but none is open")
		}
		l.closingState++
		l.isInline = false
		l.elementStepNum = 0
		l.emit(tScClose)
	case r == '\\':
		l.ignore()
		if l.peek() == '"' || l.peek() == '`' {
			return lexShortcodeParam(l, true)
		}
	case l.elementStepNum > 0 && (isAlphaNumericOrHyphen(r) || r == '"' || r == '`'): // positional params can have quotes
		l.backup()
		return lexShortcodeParam(l, false)
	case isAlphaNumeric(r):
		// First element inside the tag: the shortcode name.
		l.backup()
		return lexIdentifierInShortcode
	default:
		return l.errorf("unrecognized character in shortcode action: %#U. Note: Parameters with non-alphanumeric args must be quoted", r)
	}
	return lexInsideShortcode
}
// currentLeftShortcodeDelimItem returns the item type of the active left
// shortcode delimiter.
func (l *pageLexer) currentLeftShortcodeDelimItem() ItemType {
	return l.currLeftDelimItem
}

// currentRightShortcodeDelimItem returns the item type of the active right
// shortcode delimiter.
func (l *pageLexer) currentRightShortcodeDelimItem() ItemType {
	return l.currRightDelimItem
}

// currentLeftShortcodeDelim returns the byte sequence of the active left
// shortcode delimiter ("{{%" or "{{<").
func (l *pageLexer) currentLeftShortcodeDelim() []byte {
	switch l.currLeftDelimItem {
	case tLeftDelimScWithMarkup:
		return leftDelimScWithMarkup
	default:
		return leftDelimScNoMarkup
	}
}

// currentRightShortcodeDelim returns the byte sequence of the active right
// shortcode delimiter ("%}}" or ">}}").
func (l *pageLexer) currentRightShortcodeDelim() []byte {
	switch l.currRightDelimItem {
	case tRightDelimScWithMarkup:
		return rightDelimScWithMarkup
	default:
		return rightDelimScNoMarkup
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pageparser_shortcode_test.go | parser/pageparser/pageparser_shortcode_test.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"testing"
qt "github.com/frankban/quicktest"
)
// Shared expected-token values used by the lexer test tables below.
var (
	tstEOF = nti(tEOF, "")
	tstLeftNoMD = nti(tLeftDelimScNoMarkup, "{{<")
	tstRightNoMD = nti(tRightDelimScNoMarkup, ">}}")
	tstLeftMD = nti(tLeftDelimScWithMarkup, "{{%")
	tstRightMD = nti(tRightDelimScWithMarkup, "%}}")
	tstSCClose = nti(tScClose, "/")
	tstSC1 = nti(tScName, "sc1")
	tstSC1Inline = nti(tScNameInline, "sc1.inline")
	tstSC2Inline = nti(tScNameInline, "sc2.inline")
	tstSC2 = nti(tScName, "sc2")
	tstSC3 = nti(tScName, "sc3")
	tstSCSlash = nti(tScName, "sc/sub")
	tstParam1 = nti(tScParam, "param1")
	tstParam2 = nti(tScParam, "param2")
	tstVal = nti(tScParamVal, "Hello World")
	tstText = nti(tText, "Hello World")
)
// shortCodeLexerTests maps lexer inputs to the exact item sequences they
// must produce, covering delimiters, parameters, quoting and raw strings,
// nesting, comments, inline shortcodes and the various error cases.
var shortCodeLexerTests = []lexerTest{
	{"empty", "", []typeText{tstEOF}, nil},
	{"spaces", " \t\n", []typeText{nti(tText, " \t\n"), tstEOF}, nil},
	{"text", `to be or not`, []typeText{nti(tText, "to be or not"), tstEOF}, nil},
	{"no markup", `{{< sc1 >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil},
	{"with EOL", "{{< sc1 \n >}}", []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil},
	{"forward slash inside name", `{{< sc/sub >}}`, []typeText{tstLeftNoMD, tstSCSlash, tstRightNoMD, tstEOF}, nil},
	{"simple with markup", `{{% sc1 %}}`, []typeText{tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil},
	{"with spaces", `{{<     sc1     >}}`, []typeText{tstLeftNoMD, tstSC1, tstRightNoMD, tstEOF}, nil},
	{"indented on new line", "Hello\n    {{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "    "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil},
	{"indented on new line tab", "Hello\n\t{{% sc1 %}}", []typeText{nti(tText, "Hello\n"), nti(tIndentation, "\t"), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil},
	{"indented on first line", "  {{% sc1 %}}", []typeText{nti(tIndentation, "  "), tstLeftMD, tstSC1, tstRightMD, tstEOF}, nil},
	{"mismatched rightDelim", `{{< sc1 %}}`, []typeText{
		tstLeftNoMD, tstSC1,
		nti(tError, "unrecognized character in shortcode action: U+0025 '%'. Note: Parameters with non-alphanumeric args must be quoted"),
	}, nil},
	{"inner, markup", `{{% sc1 %}} inner {{% /sc1 %}}`, []typeText{
		tstLeftMD,
		tstSC1,
		tstRightMD,
		nti(tText, " inner "),
		tstLeftMD,
		tstSCClose,
		tstSC1,
		tstRightMD,
		tstEOF,
	}, nil},
	{"close, but no open", `{{< /sc1 >}}`, []typeText{
		tstLeftNoMD, nti(tError, "got closing shortcode, but none is open"),
	}, nil},
	{"close wrong", `{{< sc1 >}}{{< /another >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
		nti(tError, "closing tag for shortcode 'another' does not match start tag"),
	}, nil},
	{"close, but no open, more", `{{< sc1 >}}{{< /sc1 >}}{{< /another >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose,
		nti(tError, "closing tag for shortcode 'another' does not match start tag"),
	}, nil},
	{"close with extra keyword", `{{< sc1 >}}{{< /sc1 keyword>}}`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD, tstLeftNoMD, tstSCClose, tstSC1,
		nti(tError, "unclosed shortcode"),
	}, nil},
	{"float param, positional", `{{< sc1 3.14 >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "3.14"), tstRightNoMD, tstEOF,
	}, nil},
	{"float param, named", `{{< sc1 param1=3.14 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF,
	}, nil},
	{"named param, raw string", `{{< sc1 param1=` + "`" + "Hello World" + "`" + " >}}", []typeText{
		tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "Hello World"), tstRightNoMD, tstEOF,
	}, nil},
	{"float param, named, space before", `{{< sc1 param1= 3.14 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, "3.14"), tstRightNoMD, tstEOF,
	}, nil},
	{"Youtube id", `{{< sc1 -ziL-Q_456igdO-4 >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-Q_456igdO-4"), tstRightNoMD, tstEOF,
	}, nil},
	{"non-alphanumerics param quoted", `{{< sc1 "-ziL-.%QigdO-4" >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "-ziL-.%QigdO-4"), tstRightNoMD, tstEOF,
	}, nil},
	{"raw string", `{{< sc1` + "`" + "Hello World" + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), tstRightNoMD, tstEOF,
	}, nil},
	{"raw string with newline", `{{< sc1` + "`" + `Hello
    World` + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, `Hello
    World`), tstRightNoMD, tstEOF,
	}, nil},
	{"raw string with escape character", `{{< sc1` + "`" + `Hello \b World` + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, `Hello \b World`), tstRightNoMD, tstEOF,
	}, nil},
	{"two params", `{{< sc1 param1   param2 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstParam2, tstRightNoMD, tstEOF,
	}, nil},
	// issue #934
	{"self-closing", `{{< sc1 />}}`, []typeText{
		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF,
	}, nil},
	// Issue 2498
	{"multiple self-closing", `{{< sc1 />}}{{< sc1 />}}`, []typeText{
		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD,
		tstLeftNoMD, tstSC1, tstSCClose, tstRightNoMD, tstEOF,
	}, nil},
	{"self-closing with param", `{{< sc1 param1 />}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
	}, nil},
	{"self-closing with extra keyword", `{{< sc1 / keyword>}}`, []typeText{
		tstLeftNoMD, tstSC1, tstSCClose, nti(tError, "closing tag for shortcode 'keyword' does not match start tag"),
	}, nil},
	{"multiple self-closing with param", `{{< sc1 param1 />}}{{< sc1 param1 />}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
	}, nil},
	{"multiple different self-closing with param", `{{< sc1 param1 />}}{{< sc2 param1 />}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstSCClose, tstRightNoMD,
		tstLeftNoMD, tstSC2, tstParam1, tstSCClose, tstRightNoMD, tstEOF,
	}, nil},
	{"nested simple", `{{< sc1 >}}{{< sc2 >}}{{< /sc1 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD,
		tstLeftNoMD, tstSC2, tstRightNoMD,
		tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF,
	}, nil},
	{"nested same", `{{< sc1 >}}{{< sc1 >}}{{< /sc1 >}}{{< /sc1 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD,
		tstLeftNoMD, tstSC1, tstRightNoMD,
		tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD,
		tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD, tstEOF,
	}, nil},
	{"nested complex", `{{< sc1 >}}ab{{% sc2 param1 %}}cd{{< sc3 >}}ef{{< /sc3 >}}gh{{% /sc2 %}}ij{{< /sc1 >}}kl`, []typeText{
		tstLeftNoMD, tstSC1, tstRightNoMD,
		nti(tText, "ab"),
		tstLeftMD, tstSC2, tstParam1, tstRightMD,
		nti(tText, "cd"),
		tstLeftNoMD, tstSC3, tstRightNoMD,
		nti(tText, "ef"),
		tstLeftNoMD, tstSCClose, tstSC3, tstRightNoMD,
		nti(tText, "gh"),
		tstLeftMD, tstSCClose, tstSC2, tstRightMD,
		nti(tText, "ij"),
		tstLeftNoMD, tstSCClose, tstSC1, tstRightNoMD,
		nti(tText, "kl"), tstEOF,
	}, nil},
	{"two quoted params", `{{< sc1 "param nr. 1" "param nr. 2" >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "param nr. 1"), nti(tScParam, "param nr. 2"), tstRightNoMD, tstEOF,
	}, nil},
	{"two named params", `{{< sc1 param1="Hello World" param2="p2Val">}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstVal, tstParam2, nti(tScParamVal, "p2Val"), tstRightNoMD, tstEOF,
	}, nil},
	{"escaped quotes", `{{< sc1 param1=\"Hello World\"  >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstVal, tstRightNoMD, tstEOF,
	}, nil},
	{"escaped quotes, positional param", `{{< sc1 \"param1\"  >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstRightNoMD, tstEOF,
	}, nil},
	{"escaped quotes inside escaped quotes", `{{< sc1 param1=\"Hello \"escaped\" World\"  >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1,
		nti(tScParamVal, `Hello `), nti(tError, `got positional parameter 'escaped'. Cannot mix named and positional parameters`),
	}, nil},
	{
		"escaped quotes inside nonescaped quotes",
		`{{< sc1 param1="Hello \"escaped\" World"  >}}`,
		[]typeText{
			tstLeftNoMD, tstSC1, tstParam1, nti(tScParamVal, `Hello "escaped" World`), tstRightNoMD, tstEOF,
		},
		nil,
	},
	{
		"escaped quotes inside nonescaped quotes in positional param",
		`{{< sc1 "Hello \"escaped\" World"  >}}`,
		[]typeText{
			tstLeftNoMD, tstSC1, nti(tScParam, `Hello "escaped" World`), tstRightNoMD, tstEOF,
		},
		nil,
	},
	{"escaped raw string, named param", `{{< sc1 param1=` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"),
	}, nil},
	{"escaped raw string, positional param", `{{< sc1 param1 ` + `\` + "`" + "Hello World" + `\` + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, nti(tError, "unrecognized escape character"),
	}, nil},
	{"two raw string params", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tScParam, "Second Param"), tstRightNoMD, tstEOF,
	}, nil},
	{"unterminated quote", `{{< sc1 param2="Hello World>}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam2, nti(tError, "unterminated quoted string in shortcode parameter-argument: 'Hello World>}}'"),
	}, nil},
	{"unterminated raw string", `{{< sc1` + "`" + "Hello World" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tError, "unterminated raw string in shortcode parameter-argument: 'Hello World >}}'"),
	}, nil},
	{"unterminated raw string in second argument", `{{< sc1` + "`" + "Hello World" + "`" + "`" + "Second Param" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, nti(tScParam, "Hello World"), nti(tError, "unterminated raw string in shortcode parameter-argument: 'Second Param >}}'"),
	}, nil},
	{"one named param, one not", `{{< sc1 param1="Hello World" p2 >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstVal,
		nti(tError, "got positional parameter 'p2'. Cannot mix named and positional parameters"),
	}, nil},
	{"one named param, one quoted positional param, both raw strings", `{{< sc1 param1=` + "`" + "Hello World" + "`" + "`" + "Second Param" + "`" + ` >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstVal,
		nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"),
	}, nil},
	{"one named param, one quoted positional param", `{{< sc1 param1="Hello World" "And Universe" >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1, tstVal,
		nti(tError, "got quoted positional parameter. Cannot mix named and positional parameters"),
	}, nil},
	{"one quoted positional param, one named param", `{{< sc1 "param1" param2="And Universe" >}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1,
		nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"),
	}, nil},
	{"ono positional param, one not", `{{< sc1 param1 param2="Hello World">}}`, []typeText{
		tstLeftNoMD, tstSC1, tstParam1,
		nti(tError, "got named parameter 'param2'. Cannot mix named and positional parameters"),
	}, nil},
	{"commented out", `{{</* sc1 */>}}`, []typeText{
		nti(tText, "{{<"), nti(tText, " sc1 "), nti(tText, ">}}"), tstEOF,
	}, nil},
	{"commented out, with asterisk inside", `{{</* sc1 "**/*.pdf" */>}}`, []typeText{
		nti(tText, "{{<"), nti(tText, " sc1 \"**/*.pdf\" "), nti(tText, ">}}"), tstEOF,
	}, nil},
	{"commented out, missing close", `{{</* sc1 >}}`, []typeText{
		nti(tError, "comment must be closed"),
	}, nil},
	{"commented out, misplaced close", `{{</* sc1 >}}*/`, []typeText{
		nti(tError, "comment must be closed"),
	}, nil},
	// Inline shortcodes
	{"basic inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil},
	{"basic inline with space", `{{< sc1.inline >}}Hello World{{< / sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil},
	{"inline self closing", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD, tstEOF}, nil},
	{"inline self closing, then a new inline", `{{< sc1.inline >}}Hello World{{< /sc1.inline >}}Hello World{{< sc1.inline />}}{{< sc2.inline >}}Hello World{{< /sc2.inline >}}`, []typeText{
		tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSC1Inline, tstSCClose, tstRightNoMD,
		tstLeftNoMD, tstSC2Inline, tstRightNoMD, tstText, tstLeftNoMD, tstSCClose, tstSC2Inline, tstRightNoMD, tstEOF,
	}, nil},
	{"inline with template syntax", `{{< sc1.inline >}}{{ .Get 0 }}{{ .Get 1 }}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, nti(tText, "{{ .Get 0 }}"), nti(tText, "{{ .Get 1 }}"), tstLeftNoMD, tstSCClose, tstSC1Inline, tstRightNoMD, tstEOF}, nil},
	{"inline with nested shortcode (not supported)", `{{< sc1.inline >}}Hello World{{< sc1 >}}{{< /sc1.inline >}}`, []typeText{tstLeftNoMD, tstSC1Inline, tstRightNoMD, tstText, nti(tError, "inline shortcodes do not support nesting")}, nil},
	{"inline case mismatch", `{{< sc1.Inline >}}Hello World{{< /sc1.Inline >}}`, []typeText{tstLeftNoMD, nti(tError, "period in shortcode name only allowed for inline identifiers")}, nil},
}
// TestShortcodeLexer runs every case in shortCodeLexerTests through the
// lexer and verifies that the emitted item stream matches the expectation.
func TestShortcodeLexer(t *testing.T) {
	t.Parallel()
	c := qt.New(t)
	for i, tc := range shortCodeLexerTests {
		t.Run(tc.name, func(t *testing.T) {
			items, err := collect([]byte(tc.input), true, lexMainSection)
			c.Assert(err, qt.IsNil)
			if equal(tc.input, items, tc.items) {
				return
			}
			// Render both streams to strings for a readable diff.
			got := itemsToString(items, []byte(tc.input))
			want := testItemsToString(tc.items)
			c.Assert(got, qt.Equals, want, qt.Commentf("Test %d: %s", i, tc.name))
		})
	}
}
// BenchmarkShortcodeLexer measures lexing throughput over the full
// shortCodeLexerTests corpus (valid and error cases alike).
func BenchmarkShortcodeLexer(b *testing.B) {
	// Convert the inputs up front so the timed loop measures lexing only.
	testInputs := make([][]byte, len(shortCodeLexerTests))
	for i, input := range shortCodeLexerTests {
		testInputs[i] = []byte(input.input)
	}
	var cfg Config
	for b.Loop() {
		for _, input := range testInputs {
			_, err := collectWithConfig(input, true, lexMainSection, cfg)
			if err != nil {
				b.Fatal(err)
			}
		}
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pageparser.go | parser/pageparser/pageparser.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"bytes"
"errors"
"fmt"
"io"
"regexp"
"strings"
"github.com/gohugoio/hugo/parser/metadecoders"
)
// Result holds the parse result.
type Result interface {
	// Iterator returns a new Iterator positioned at the beginning of the parse tree.
	Iterator() *Iterator
	// Input returns the input to Parse.
	Input() []byte
}

// Compile-time check that pageLexer satisfies Result.
var _ Result = (*pageLexer)(nil)
// ParseBytes parses the page in b according to the given Config and returns
// the lexed items. Front matter detection is skipped when cfg.NoFrontMatter
// is set.
func ParseBytes(b []byte, cfg Config) (Items, error) {
	start := lexIntroSection
	if cfg.NoFrontMatter {
		start = lexMainSection
	}
	lexer, err := parseBytes(b, cfg, start)
	if err != nil {
		return nil, err
	}
	return lexer.items, lexer.err
}
// ContentFrontMatter holds the decoded front matter, its source format,
// and the raw content that follows it.
type ContentFrontMatter struct {
	// Content is the raw page content after any front matter.
	Content []byte
	// FrontMatter is the decoded front matter key/value map (nil when absent).
	FrontMatter map[string]any
	// FrontMatterFormat identifies which front matter format was detected.
	FrontMatterFormat metadecoders.Format
}
// ParseFrontMatterAndContent is a convenience method to extract front matter
// and content from a content page.
func ParseFrontMatterAndContent(r io.Reader) (ContentFrontMatter, error) {
	var cf ContentFrontMatter
	input, err := io.ReadAll(r)
	if err != nil {
		return cf, fmt.Errorf("failed to read page content: %w", err)
	}
	psr, err := ParseBytes(input, Config{})
	if err != nil {
		return cf, err
	}
	var frontMatterSource []byte
	iter := NewIterator(psr)
	// Walk the item stream: record the front matter source when seen; the
	// first item after it marks where the content starts.
	walkFn := func(item Item) bool {
		if frontMatterSource != nil {
			// The rest is content.
			cf.Content = input[item.low:]
			// Done
			return false
		} else if item.IsFrontMatter() {
			cf.FrontMatterFormat = FormatFromFrontMatterType(item.Type)
			frontMatterSource = item.Val(input)
		}
		return true
	}
	iter.PeekWalk(walkFn)
	// NOTE(review): frontMatterSource may still be nil here when the page
	// has no front matter — presumably UnmarshalToMap tolerates that; confirm.
	cf.FrontMatter, err = metadecoders.Default.UnmarshalToMap(frontMatterSource, cf.FrontMatterFormat)
	return cf, err
}
// FormatFromFrontMatterType maps a front matter item type to the
// corresponding metadecoders.Format. Unknown types yield the empty Format.
func FormatFromFrontMatterType(typ ItemType) metadecoders.Format {
	switch typ {
	case TypeFrontMatterYAML:
		return metadecoders.YAML
	case TypeFrontMatterTOML:
		return metadecoders.TOML
	case TypeFrontMatterJSON:
		return metadecoders.JSON
	case TypeFrontMatterORG:
		return metadecoders.ORG
	}
	return ""
}
// ParseMain parses starting with the main section. Used in tests.
func ParseMain(r io.Reader, cfg Config) (Result, error) {
	return parseSection(r, cfg, lexMainSection)
}

// parseSection reads all of r and lexes it starting in the given state.
func parseSection(r io.Reader, cfg Config, start stateFunc) (Result, error) {
	b, err := io.ReadAll(r)
	if err != nil {
		return nil, fmt.Errorf("failed to read page content: %w", err)
	}
	return parseBytes(b, cfg, start)
}

// parseBytes runs a fresh lexer over b to completion and returns it.
func parseBytes(b []byte, cfg Config, start stateFunc) (*pageLexer, error) {
	lexer := newPageLexer(b, start, cfg)
	lexer.run()
	return lexer, nil
}
// NewIterator creates a new Iterator positioned before the first item.
func NewIterator(items Items) *Iterator {
	return &Iterator{items: items, lastPos: -1}
}

// An Iterator has methods to iterate a parsed page with support going back
// if needed.
type Iterator struct {
	items Items
	lastPos int // index of the last item returned by Next (-1 before the first call)
}
// Next consumes and returns the next item.
func (t *Iterator) Next() Item {
	t.lastPos++
	return t.Current()
}

// errIndexOutOfBounds is returned by Current (and hence Next) when the
// iterator has moved past the last item.
var errIndexOutOfBounds = Item{Type: tError, Err: errors.New("no more tokens")}

// Current will repeatably return the current item.
func (t *Iterator) Current() Item {
	if t.lastPos >= len(t.items) {
		return errIndexOutOfBounds
	}
	return t.items[t.lastPos]
}
// Backup backs up one token; panics if the iterator has not moved forward yet.
func (t *Iterator) Backup() {
	if t.lastPos < 0 {
		panic("need to go forward before going back")
	}
	t.lastPos--
}

// Pos returns the index of the current item in the item slice
// (not a byte offset into the input).
func (t *Iterator) Pos() int {
	return t.lastPos
}
// IsValueNext reports whether the next item is neither an error nor EOF.
func (t *Iterator) IsValueNext() bool {
	i := t.Peek()
	return i.Type != tError && i.Type != tEOF
}

// Peek looks at, but does not consume, the next item.
// Repeated, sequential calls will return the same item.
// NOTE(review): unlike Current, this indexes without a bounds check, so
// peeking past the final item would panic — presumably the trailing
// EOF/error items make that unreachable in practice; confirm.
func (t *Iterator) Peek() Item {
	return t.items[t.lastPos+1]
}
// PeekWalk feeds the upcoming items (without consuming them) to walkFn
// until walkFn returns false or the items are exhausted.
func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
	for next := t.lastPos + 1; next < len(t.items); next++ {
		if !walkFn(t.items[next]) {
			return
		}
	}
}
// Consume advances the iterator by up to cnt tokens, backing off so that an
// error or EOF token is left unconsumed.
func (t *Iterator) Consume(cnt int) {
	for i := 0; i < cnt; i++ {
		switch t.Next().Type {
		case tError, tEOF:
			t.Backup()
			return
		}
	}
}
// LineNumber returns the 1-based line number of the current item within
// source. Used for logging.
func (t *Iterator) LineNumber(source []byte) int {
	return bytes.Count(source[:t.Current().low], lf) + 1
}
// IsProbablySourceOfItems returns true if the given source looks like original
// source of the items.
// There may be some false positives, but that is highly unlikely and good enough
// for the planned purpose.
// It will also return false if the last item is not EOF (error situations) and
// false if both source and items are empty.
func IsProbablySourceOfItems(source []byte, items Items) bool {
	if len(source) == 0 && len(items) == 0 {
		return false
	}
	if len(items) == 0 {
		return false
	}
	last := items[len(items)-1]
	if last.Type != tEOF {
		return false
	}
	// The EOF item must sit exactly at the end of the source.
	if last.Pos() != len(source) {
		return false
	}
	for _, item := range items {
		if item.Type == tError {
			return false
		}
		if item.Type == tEOF {
			// Reached EOF without a mismatch: looks like the original source.
			return true
		}
		if item.Pos() >= len(source) {
			return false
		}
		// Cheap sanity check: each item must start with the same byte as the
		// source at its recorded position.
		if item.firstByte != source[item.Pos()] {
			return false
		}
	}
	return true
}
// hasShortcodeRe matches the opening of a shortcode: "{{%" or "{{<" followed
// by any character other than '/', so that closing tags such as "{{</" do
// not count as an opening.
// Fix: the character class previously was [%,<], so plain text like "{{, x"
// was misreported as containing a shortcode; the stray ',' is removed.
var hasShortcodeRe = regexp.MustCompile(`{{[%<][^/]`)

// HasShortcode returns true if the given string contains a shortcode.
func HasShortcode(s string) bool {
	// Fast path for the common case.
	if !strings.Contains(s, "{{") {
		return false
	}
	return hasShortcodeRe.MatchString(s)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pagelexer_intro_test.go | parser/pageparser/pagelexer_intro_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"testing"
qt "github.com/frankban/quicktest"
)
// Test_lexIntroSection verifies that the intro lexer identifies each front
// matter format (or plain text) and records the matching summary divider.
func Test_lexIntroSection(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	cases := []struct {
		input                string
		expectItemType       ItemType
		expectSummaryDivider []byte
	}{
		{"{\"title\": \"JSON\"}\n", TypeFrontMatterJSON, summaryDivider},
		{"#+TITLE: ORG\n", TypeFrontMatterORG, summaryDividerOrg},
		{"+++\ntitle = \"TOML\"\n+++\n", TypeFrontMatterTOML, summaryDivider},
		{"---\ntitle: YAML\n---\n", TypeFrontMatterYAML, summaryDivider},
		// Issue 13152
		{"# ATX Header Level 1\n", tText, summaryDivider},
	}

	for i, tc := range cases {
		comment := qt.Commentf("[%d] %v", i, tc.input)
		lexer := newPageLexer([]byte(tc.input), lexIntroSection, Config{})
		lexer.run()
		c.Assert(lexer.items[0].Type, qt.Equals, tc.expectItemType, comment)
		c.Assert(lexer.summaryDivider, qt.DeepEquals, tc.expectSummaryDivider, comment)
	}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pagelexer.go | parser/pageparser/pagelexer.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"bytes"
"fmt"
"unicode"
"unicode/utf8"
"unique"
)
// eof signals the end of input from pageLexer.next.
const eof = -1

// stateFunc is one step of the scanner; it returns the next state
// (nil terminates the scan).
type stateFunc func(*pageLexer) stateFunc
// pageLexer scans raw page source and emits Items (front matter, text,
// shortcode tokens, summary divider, EOF). It also implements Result.
type pageLexer struct {
	input []byte // the source being scanned
	stateStart stateFunc // state the scan starts in
	state stateFunc // current state function
	pos int // input position
	start int // item start position
	width int // width of last element
	// Contains lexers for shortcodes and other main section
	// elements.
	sectionHandlers *sectionHandlers
	cfg Config
	// The summary divider to look for.
	summaryDivider []byte
	// Set when we have parsed any summary divider
	summaryDividerChecked bool
	lexerShortcodeState
	// items delivered to client
	items Items
	// error delivered to the client
	err error
}
// Implement the Result interface

// Iterator returns a new Iterator over the lexed items.
func (l *pageLexer) Iterator() *Iterator {
	return NewIterator(l.items)
}

// Input returns the original input being scanned.
func (l *pageLexer) Input() []byte {
	return l.input
}
// Config configures the page lexer.
type Config struct {
	// NoFrontMatter, when set, skips front matter detection entirely.
	NoFrontMatter bool
	// NoSummaryDivider, when set, skips scanning for the summary divider.
	NoSummaryDivider bool
}
// newPageLexer creates a scanner for input that starts in stateStart.
// Scanning begins at position 0; the shortcode delimiters default to the
// "no markup" variant ({{< ... >}}) until a {{% delimiter is seen.
func newPageLexer(input []byte, stateStart stateFunc, cfg Config) *pageLexer {
	lexer := &pageLexer{
		input: input,
		stateStart: stateStart,
		summaryDivider: summaryDivider,
		cfg: cfg,
		lexerShortcodeState: lexerShortcodeState{
			currLeftDelimItem: tLeftDelimScNoMarkup,
			currRightDelimItem: tRightDelimScNoMarkup,
			openShortcodes: make(map[unique.Handle[string]]bool),
		},
		items: make([]Item, 0, 5),
	}
	lexer.sectionHandlers = createSectionHandlers(lexer)
	return lexer
}
// run drives the state machine: each state function returns the next state
// until one returns nil.
func (l *pageLexer) run() *pageLexer {
	for l.state = l.stateStart; l.state != nil; {
		l.state = l.state(l)
	}
	return l
}
// Page syntax
var (
	// byteOrderMark is the Unicode BOM that may prefix the input.
	byteOrderMark = '\ufeff'
	// summaryDivider marks the end of the page summary.
	summaryDivider = []byte("<!--more-->")
	// summaryDividerOrg is the Org mode variant of the summary divider.
	summaryDividerOrg = []byte("# more")
	// Front matter delimiters: TOML and YAML fence both ends; Org lines
	// start with "#+".
	delimTOML = []byte("+++")
	delimYAML = []byte("---")
	delimOrg = []byte("#+")
)
// next decodes and consumes the next rune from the input, returning eof at
// the end. The decoded width is recorded so backup can undo the step.
func (l *pageLexer) next() rune {
	if l.pos >= len(l.input) {
		l.width = 0
		return eof
	}
	r, w := utf8.DecodeRune(l.input[l.pos:])
	l.width = w
	l.pos += w
	return r
}
// peek returns the next rune without consuming it.
func (l *pageLexer) peek() rune {
	r := l.next()
	l.backup()
	return r
}

// backup steps back one rune. It must only be called once per call of next.
func (l *pageLexer) backup() {
	l.pos -= l.width
}

// append records the item, capturing its first input byte (used by
// IsProbablySourceOfItems to sanity-check item/source pairing).
func (l *pageLexer) append(item Item) {
	if item.Pos() < len(l.input) {
		item.firstByte = l.input[item.Pos()]
	}
	l.items = append(l.items, item)
}
// emit sends an item of the given type back to the client, covering the
// input span [l.start, l.pos). For text items, a trailing run of
// whitespace may be split off as a separate tIndentation item.
func (l *pageLexer) emit(t ItemType) {
	defer func() {
		l.start = l.pos
	}()
	if t == tText {
		// Identify any trailing whitespace/indentation.
		// We currently only care about the last one.
		for i := l.pos - 1; i >= l.start; i-- {
			b := l.input[i]
			if b != ' ' && b != '\t' && b != '\r' && b != '\n' {
				break
			}
			if i == l.start && b != '\n' {
				// The whole span is whitespace with no newline: emit it as
				// indentation only.
				l.append(Item{Type: tIndentation, low: l.start, high: l.pos})
				return
			} else if b == '\n' && i < l.pos-1 {
				// Split: text up to and including the newline, then the
				// trailing whitespace run as indentation.
				l.append(Item{Type: t, low: l.start, high: i + 1})
				l.append(Item{Type: tIndentation, low: i + 1, high: l.pos})
				return
			} else if b == '\n' && i == l.pos-1 {
				// The text ends with a newline: nothing to split off.
				break
			}
		}
	}
	l.append(Item{Type: t, low: l.start, high: l.pos})
}
// emitString sends a string item back to the client.
func (l *pageLexer) emitString(t ItemType) {
	l.append(Item{Type: t, low: l.start, high: l.pos, isString: true})
	l.start = l.pos
}

// isEOF reports whether the scan position has reached the end of the input.
func (l *pageLexer) isEOF() bool {
	return l.pos >= len(l.input)
}
// ignoreEscapesAndEmit emits the current span as segments, dropping any
// '\\' escape characters so the downstream parser does not have to handle
// them (see issue #10236). Because the backslashes are removed, the AST
// cannot be rendered back to the exact input, but that matches the
// pre-#10236 state. isString marks the emitted item as a string value,
// mirroring emitString.
func (l *pageLexer) ignoreEscapesAndEmit(t ItemType, isString bool) {
	i := l.start
	k := i
	var segments []lowHigh
	for i < l.pos {
		r, w := utf8.DecodeRune(l.input[i:l.pos])
		if r == '\\' {
			if i > k {
				segments = append(segments, lowHigh{k, i})
			}
			// We don't send the backslash back to the client,
			// which makes the end parsing simpler.
			k = i + w
		}
		i += w
	}
	if k < l.pos {
		segments = append(segments, lowHigh{k, l.pos})
	}
	if len(segments) > 0 {
		// Fix: propagate isString; it was previously accepted but silently
		// dropped, so escaped values lost their string marker.
		l.append(Item{Type: t, segments: segments, isString: isString})
	}
	l.start = l.pos
}
// current returns the not-yet-emitted span (for debugging and error handling).
func (l *pageLexer) current() []byte {
	return l.input[l.start:l.pos]
}

// ignore discards the current span without emitting an item.
func (l *pageLexer) ignore() {
	l.start = l.pos
}
// lf is used for line counting (see Iterator.LineNumber).
var lf = []byte("\n")

// errorf emits an error item and returns nil, which terminates the scanner.
func (l *pageLexer) errorf(format string, args ...any) stateFunc {
	l.append(Item{Type: tError, Err: fmt.Errorf(format, args...), low: l.start, high: l.pos})
	return nil
}
// consumeCRLF consumes an optional '\r' followed by an optional '\n' and
// reports whether at least one of the two was consumed. A non-matching
// rune is backed up, so a bare "\n" (no preceding "\r") is still consumed.
func (l *pageLexer) consumeCRLF() bool {
	var consumed bool
	for _, r := range crLf {
		if l.next() != r {
			l.backup()
		} else {
			consumed = true
		}
	}
	return consumed
}
// consumeToSpace advances to just before the next whitespace rune or EOF.
func (l *pageLexer) consumeToSpace() {
	for r := l.next(); r != eof && !unicode.IsSpace(r); r = l.next() {
	}
	l.backup()
}

// consumeSpace advances past any run of whitespace, stopping just before
// the first non-space rune or EOF.
func (l *pageLexer) consumeSpace() {
	for r := l.next(); r != eof && unicode.IsSpace(r); r = l.next() {
	}
	l.backup()
}
// sectionHandlers multiplexes the individual sectionHandler instances
// (shortcodes, summary divider) used in the main section of a page.
type sectionHandlers struct {
	l *pageLexer
	// Set when none of the sections are found so we
	// can safely stop looking and skip to the end.
	skipAll bool
	handlers []*sectionHandler
	skipIndexes []int // scratch buffer reused across skip calls
}

// skip returns the offset (relative to the current position) of the nearest
// upcoming section, or -1 when no handler has a match, in which case the
// lexer can fast forward to the end of the input.
func (s *sectionHandlers) skip() int {
	if s.skipAll {
		return -1
	}
	s.skipIndexes = s.skipIndexes[:0]
	var shouldSkip bool
	for _, skipper := range s.handlers {
		idx := skipper.skip()
		if idx != -1 {
			shouldSkip = true
			s.skipIndexes = append(s.skipIndexes, idx)
		}
	}
	if !shouldSkip {
		s.skipAll = true
		return -1
	}
	return minIndex(s.skipIndexes...)
}
// createSectionHandlers builds the section handlers for the given lexer:
// one for shortcodes and, unless disabled by cfg.NoSummaryDivider, one for
// the summary divider.
func createSectionHandlers(l *pageLexer) *sectionHandlers {
	handlers := make([]*sectionHandler, 0, 2)
	shortCodeHandler := &sectionHandler{
		l: l,
		skipFunc: func(l *pageLexer) int {
			return l.index(leftDelimSc)
		},
		lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
			if !l.isShortCodeStart() {
				return origin, false
			}
			if l.isInline {
				// If we're inside an inline shortcode, the only valid shortcode markup is
				// the markup which closes it.
				b := l.input[l.pos+3:]
				end := indexNonWhiteSpace(b, '/')
				// NOTE(review): end is an offset into b but is compared against
				// len(l.input)-1 (an absolute position) — looks suspicious; confirm intent.
				if end != len(l.input)-1 {
					b = bytes.TrimSpace(b[end+1:])
					if end == -1 || !bytes.HasPrefix(b, []byte(l.currShortcodeName+" ")) {
						return l.errorf("inline shortcodes do not support nesting"), true
					}
				}
			}
			// Select the delimiter pair ({{%...%}} vs {{<...>}}) for this shortcode.
			if l.hasPrefix(leftDelimScWithMarkup) {
				l.currLeftDelimItem = tLeftDelimScWithMarkup
				l.currRightDelimItem = tRightDelimScWithMarkup
			} else {
				l.currLeftDelimItem = tLeftDelimScNoMarkup
				l.currRightDelimItem = tRightDelimScNoMarkup
			}
			return lexShortcodeLeftDelim, true
		},
	}
	handlers = append(handlers, shortCodeHandler)
	if !l.cfg.NoSummaryDivider {
		summaryDividerHandler := &sectionHandler{
			l: l,
			skipFunc: func(l *pageLexer) int {
				// Only the first summary divider counts.
				if l.summaryDividerChecked {
					return -1
				}
				return l.index(l.summaryDivider)
			},
			lexFunc: func(origin stateFunc, l *pageLexer) (stateFunc, bool) {
				if !l.hasPrefix(l.summaryDivider) {
					return origin, false
				}
				l.summaryDividerChecked = true
				l.pos += len(l.summaryDivider)
				// This makes it a little easier to reason about later.
				l.consumeSpace()
				l.emit(TypeLeadSummaryDivider)
				return origin, true
			},
		}
		handlers = append(handlers, summaryDividerHandler)
	}
	return &sectionHandlers{
		l: l,
		handlers: handlers,
		skipIndexes: make([]int, len(handlers)),
	}
}
// lex emits any pending text and gives each active handler a chance to lex
// the section at the current position. If no handler claims it, the
// position advances one byte and the origin state continues scanning.
func (s *sectionHandlers) lex(origin stateFunc) stateFunc {
	if s.skipAll {
		return nil
	}
	if s.l.pos > s.l.start {
		s.l.emit(tText)
	}
	for _, handler := range s.handlers {
		if handler.skipAll {
			continue
		}
		next, handled := handler.lexFunc(origin, handler.l)
		if next == nil || handled {
			return next
		}
	}
	// Not handled by the above.
	s.l.pos++
	return origin
}
// sectionHandler handles one kind of main-section construct
// (shortcode or summary divider).
type sectionHandler struct {
	l *pageLexer
	// No more sections of this type.
	skipAll bool
	// Returns the index of the next match, -1 if none found.
	skipFunc func(l *pageLexer) int
	// Lex lexes the current section and returns the next state func and
	// a bool telling if this section was handled.
	// Note that returning nil as the next state will terminate the
	// lexer.
	lexFunc func(origin stateFunc, l *pageLexer) (stateFunc, bool)
}

// skip returns the offset of this handler's next match, marking the handler
// as exhausted (skipAll) when no further match exists.
func (s *sectionHandler) skip() int {
	if s.skipAll {
		return -1
	}
	idx := s.skipFunc(s.l)
	if idx == -1 {
		s.skipAll = true
	}
	return idx
}
func lexMainSection(l *pageLexer) stateFunc {
if l.isEOF() {
return lexDone
}
// Fast forward as far as possible.
skip := l.sectionHandlers.skip()
if skip == -1 {
l.pos = len(l.input)
return lexDone
} else if skip > 0 {
l.pos += skip
}
next := l.sectionHandlers.lex(lexMainSection)
if next != nil {
return next
}
l.pos = len(l.input)
return lexDone
}
func lexDone(l *pageLexer) stateFunc {
// Done!
if l.pos > l.start {
l.emit(tText)
}
l.emit(tEOF)
return nil
}
//lint:ignore U1000 useful for debugging
func (l *pageLexer) printCurrentInput() {
fmt.Printf("input[%d:]: %q", l.pos, string(l.input[l.pos:]))
}
// state helpers
func (l *pageLexer) index(sep []byte) int {
return bytes.Index(l.input[l.pos:], sep)
}
func (l *pageLexer) hasPrefix(prefix []byte) bool {
return bytes.HasPrefix(l.input[l.pos:], prefix)
}
// helper functions
// returns the min index >= 0
func minIndex(indices ...int) int {
min := -1
for _, j := range indices {
if j < 0 {
continue
}
if min == -1 {
min = j
} else if j < min {
min = j
}
}
return min
}
func indexNonWhiteSpace(s []byte, in rune) int {
idx := bytes.IndexFunc(s, func(r rune) bool {
return !unicode.IsSpace(r)
})
if idx == -1 {
return -1
}
r, _ := utf8.DecodeRune(s[idx:])
if r == in {
return idx
}
return -1
}
func isSpace(r rune) bool {
return r == ' ' || r == '\t'
}
func isAlphaNumericOrHyphen(r rune) bool {
// let unquoted YouTube ids as positional params slip through (they contain hyphens)
return isAlphaNumeric(r) || r == '-'
}
var crLf = []rune{'\r', '\n'}
func isEndOfLine(r rune) bool {
return r == '\r' || r == '\n'
}
func isAlphaNumeric(r rune) bool {
return r == '_' || unicode.IsLetter(r) || unicode.IsDigit(r)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/doc.go | parser/pageparser/doc.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package pageparser provides a parser for Hugo content files (Markdown, HTML etc.) in Hugo.
// This implementation is highly inspired by the great talk given by Rob Pike called "Lexical Scanning in Go"
// It's on YouTube, Google it!.
// See slides here: https://go.dev/talks/2011/lex.slide#1
package pageparser
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/item.go | parser/pageparser/item.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"bytes"
"fmt"
"regexp"
"strconv"
"github.com/yuin/goldmark/util"
)
type lowHigh struct {
Low int
High int
}
type Item struct {
Type ItemType
Err error
// The common case is a single segment.
low int
high int
// This is the uncommon case.
segments []lowHigh
// Used for validation.
firstByte byte
isString bool
}
type Items []Item
func (i Item) Pos() int {
if len(i.segments) > 0 {
return i.segments[0].Low
}
return i.low
}
func (i Item) Val(source []byte) []byte {
if len(i.segments) == 0 {
return source[i.low:i.high]
}
if len(i.segments) == 1 {
return source[i.segments[0].Low:i.segments[0].High]
}
var b bytes.Buffer
for _, s := range i.segments {
b.Write(source[s.Low:s.High])
}
return b.Bytes()
}
func (i Item) ValStr(source []byte) string {
return string(i.Val(source))
}
func (i Item) ValTyped(source []byte) any {
str := i.ValStr(source)
if i.isString {
// A quoted value that is a string even if it looks like a number etc.
return str
}
if boolRe.MatchString(str) {
return str == "true"
}
if intRe.MatchString(str) {
num, err := strconv.Atoi(str)
if err != nil {
return str
}
return num
}
if floatRe.MatchString(str) {
num, err := strconv.ParseFloat(str, 64)
if err != nil {
return str
}
return num
}
return str
}
func (i Item) IsText() bool {
return i.Type == tText || i.IsIndentation()
}
func (i Item) IsIndentation() bool {
return i.Type == tIndentation
}
func (i Item) IsNonWhitespace(source []byte) bool {
return len(bytes.TrimSpace(i.Val(source))) > 0
}
func (i Item) IsShortcodeName() bool {
return i.Type == tScName
}
func (i Item) IsInlineShortcodeName() bool {
return i.Type == tScNameInline
}
func (i Item) IsLeftShortcodeDelim() bool {
return i.Type == tLeftDelimScWithMarkup || i.Type == tLeftDelimScNoMarkup
}
func (i Item) IsRightShortcodeDelim() bool {
return i.Type == tRightDelimScWithMarkup || i.Type == tRightDelimScNoMarkup
}
func (i Item) IsShortcodeClose() bool {
return i.Type == tScClose
}
func (i Item) IsShortcodeParam() bool {
return i.Type == tScParam
}
func (i Item) IsShortcodeParamVal() bool {
return i.Type == tScParamVal
}
func (i Item) IsShortcodeMarkupDelimiter() bool {
return i.Type == tLeftDelimScWithMarkup || i.Type == tRightDelimScWithMarkup
}
func (i Item) IsFrontMatter() bool {
return i.Type >= TypeFrontMatterYAML && i.Type <= TypeFrontMatterORG
}
func (i Item) IsDone() bool {
return i.IsError() || i.IsEOF()
}
func (i Item) IsEOF() bool {
return i.Type == tEOF
}
func (i Item) IsError() bool {
return i.Type == tError
}
func (i Item) ToString(source []byte) string {
val := i.Val(source)
switch {
case i.IsEOF():
return "EOF"
case i.IsError():
return string(val)
case i.IsIndentation():
return fmt.Sprintf("%s:[%s]", i.Type, util.VisualizeSpaces(val))
case i.Type > tKeywordMarker:
return fmt.Sprintf("<%s>", val)
case len(val) > 50:
return fmt.Sprintf("%v:%.20q...", i.Type, val)
default:
return fmt.Sprintf("%v:[%s]", i.Type, val)
}
}
type ItemType int
const (
tError ItemType = iota
tEOF
// page items
TypeLeadSummaryDivider // <!--more-->, # more
TypeFrontMatterYAML
TypeFrontMatterTOML
TypeFrontMatterJSON
TypeFrontMatterORG
TypeIgnore // // The BOM Unicode byte order marker and possibly others
// shortcode items
tLeftDelimScNoMarkup
tRightDelimScNoMarkup
tLeftDelimScWithMarkup
tRightDelimScWithMarkup
tScClose
tScName
tScNameInline
tScParam
tScParamVal
tIndentation
tText // plain text
// preserved for later - keywords come after this
tKeywordMarker
)
var (
boolRe = regexp.MustCompile(`^(true|false)$`)
intRe = regexp.MustCompile(`^[-+]?\d+$`)
floatRe = regexp.MustCompile(`^[-+]?\d*\.\d+$`)
)
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pagelexer_test.go | parser/pageparser/pagelexer_test.go | // Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"testing"
qt "github.com/frankban/quicktest"
)
func TestMinIndex(t *testing.T) {
c := qt.New(t)
c.Assert(minIndex(4, 1, 2, 3), qt.Equals, 1)
c.Assert(minIndex(4, 0, -2, 2, 5), qt.Equals, 0)
c.Assert(minIndex(), qt.Equals, -1)
c.Assert(minIndex(-2, -3), qt.Equals, -1)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/pageparser_test.go | parser/pageparser/pageparser_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package pageparser
import (
"bytes"
"strings"
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/parser/metadecoders"
)
func BenchmarkParse(b *testing.B) {
start := `
---
title: "Front Matters"
description: "It really does"
---
This is some summary. This is some summary. This is some summary. This is some summary.
<!--more-->
`
input := []byte(start + strings.Repeat(strings.Repeat("this is text", 30)+"{{< myshortcode >}}This is some inner content.{{< /myshortcode >}}", 10))
cfg := Config{}
for b.Loop() {
if _, err := parseBytes(input, cfg, lexIntroSection); err != nil {
b.Fatal(err)
}
}
}
func TestFormatFromFrontMatterType(t *testing.T) {
c := qt.New(t)
for _, test := range []struct {
typ ItemType
expect metadecoders.Format
}{
{TypeFrontMatterJSON, metadecoders.JSON},
{TypeFrontMatterTOML, metadecoders.TOML},
{TypeFrontMatterYAML, metadecoders.YAML},
{TypeFrontMatterORG, metadecoders.ORG},
{TypeIgnore, ""},
} {
c.Assert(FormatFromFrontMatterType(test.typ), qt.Equals, test.expect)
}
}
func TestIsProbablyItemsSource(t *testing.T) {
c := qt.New(t)
input := ` {{< foo >}} `
items, err := collectStringMain(input)
c.Assert(err, qt.IsNil)
c.Assert(IsProbablySourceOfItems([]byte(input), items), qt.IsTrue)
c.Assert(IsProbablySourceOfItems(bytes.Repeat([]byte(" "), len(input)), items), qt.IsFalse)
c.Assert(IsProbablySourceOfItems([]byte(`{{< foo >}} `), items), qt.IsFalse)
c.Assert(IsProbablySourceOfItems([]byte(``), items), qt.IsFalse)
}
func TestHasShortcode(t *testing.T) {
c := qt.New(t)
c.Assert(HasShortcode("{{< foo >}}"), qt.IsTrue)
c.Assert(HasShortcode("aSDasd SDasd aSD\n\nasdfadf{{% foo %}}\nasdf"), qt.IsTrue)
c.Assert(HasShortcode("{{</* foo */>}}"), qt.IsFalse)
c.Assert(HasShortcode("{{%/* foo */%}}"), qt.IsFalse)
}
func BenchmarkHasShortcode(b *testing.B) {
withShortcode := strings.Repeat("this is text", 30) + "{{< myshortcode >}}This is some inner content.{{< /myshortcode >}}" + strings.Repeat("this is text", 30)
withoutShortcode := strings.Repeat("this is text", 30) + "This is some inner content." + strings.Repeat("this is text", 30)
b.Run("Match", func(b *testing.B) {
for b.Loop() {
HasShortcode(withShortcode)
}
})
b.Run("NoMatch", func(b *testing.B) {
for b.Loop() {
HasShortcode(withoutShortcode)
}
})
}
func TestSummaryDividerStartingFromMain(t *testing.T) {
c := qt.New(t)
input := `aaa <!--more--> bbb`
items, err := collectStringMain(input)
c.Assert(err, qt.IsNil)
c.Assert(items, qt.HasLen, 4)
c.Assert(items[1].Type, qt.Equals, TypeLeadSummaryDivider)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/parser/pageparser/itemtype_string.go | parser/pageparser/itemtype_string.go | // Code generated by "stringer -type ItemType"; DO NOT EDIT.
package pageparser
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[tError-0]
_ = x[tEOF-1]
_ = x[TypeLeadSummaryDivider-2]
_ = x[TypeFrontMatterYAML-3]
_ = x[TypeFrontMatterTOML-4]
_ = x[TypeFrontMatterJSON-5]
_ = x[TypeFrontMatterORG-6]
_ = x[TypeIgnore-7]
_ = x[tLeftDelimScNoMarkup-8]
_ = x[tRightDelimScNoMarkup-9]
_ = x[tLeftDelimScWithMarkup-10]
_ = x[tRightDelimScWithMarkup-11]
_ = x[tScClose-12]
_ = x[tScName-13]
_ = x[tScNameInline-14]
_ = x[tScParam-15]
_ = x[tScParamVal-16]
_ = x[tIndentation-17]
_ = x[tText-18]
_ = x[tKeywordMarker-19]
}
const _ItemType_name = "tErrortEOFTypeLeadSummaryDividerTypeFrontMatterYAMLTypeFrontMatterTOMLTypeFrontMatterJSONTypeFrontMatterORGTypeIgnoretLeftDelimScNoMarkuptRightDelimScNoMarkuptLeftDelimScWithMarkuptRightDelimScWithMarkuptScClosetScNametScNameInlinetScParamtScParamValtIndentationtTexttKeywordMarker"
var _ItemType_index = [...]uint16{0, 6, 10, 32, 51, 70, 89, 107, 117, 137, 158, 180, 203, 211, 218, 231, 239, 250, 262, 267, 281}
func (i ItemType) String() string {
if i < 0 || i >= ItemType(len(_ItemType_index)-1) {
return "ItemType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _ItemType_name[_ItemType_index[i]:_ItemType_index[i+1]]
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/markup_test.go | markup/markup_test.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package markup_test
import (
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/config/testconfig"
"github.com/gohugoio/hugo/markup"
"github.com/gohugoio/hugo/markup/converter"
)
func TestConverterRegistry(t *testing.T) {
c := qt.New(t)
conf := testconfig.GetTestConfig(nil, nil)
r, err := markup.NewConverterProvider(converter.ProviderConfig{Conf: conf})
c.Assert(err, qt.IsNil)
c.Assert("goldmark", qt.Equals, r.GetMarkupConfig().DefaultMarkdownHandler)
checkName := func(name string) {
p := r.Get(name)
c.Assert(p, qt.Not(qt.IsNil))
c.Assert(p.Name(), qt.Equals, name)
}
c.Assert(r.Get("foo"), qt.IsNil)
c.Assert(r.Get("markdown").Name(), qt.Equals, "goldmark")
checkName("goldmark")
checkName("asciidocext")
checkName("rst")
checkName("pandoc")
checkName("org")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/markup.go | markup/markup.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package markup contains the markup handling (e.g. Markdown).
package markup
import (
"fmt"
"strings"
"github.com/gohugoio/hugo/markup/highlight"
"github.com/gohugoio/hugo/media"
"github.com/gohugoio/hugo/markup/markup_config"
"github.com/gohugoio/hugo/markup/goldmark"
"github.com/gohugoio/hugo/markup/org"
"github.com/gohugoio/hugo/markup/asciidocext"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/pandoc"
"github.com/gohugoio/hugo/markup/rst"
)
func NewConverterProvider(cfg converter.ProviderConfig) (ConverterProvider, error) {
converters := make(map[string]converter.Provider)
mcfg := cfg.MarkupConfig()
if cfg.Highlighter == nil {
cfg.Highlighter = highlight.New(mcfg.Highlight)
}
defaultHandler := mcfg.DefaultMarkdownHandler
var defaultFound bool
add := func(p converter.ProviderProvider, subType string, aliases ...string) error {
c, err := p.New(cfg)
if err != nil {
return err
}
name := c.Name()
aliases = append(aliases, name)
aliases = append(aliases, subType)
if strings.EqualFold(name, defaultHandler) {
aliases = append(aliases, "markdown")
defaultFound = true
}
addConverter(converters, c, aliases...)
return nil
}
contentTypes := cfg.Conf.ContentTypes().(media.ContentTypes)
if err := add(goldmark.Provider, contentTypes.Markdown.SubType, contentTypes.Markdown.Suffixes()...); err != nil {
return nil, err
}
if err := add(asciidocext.Provider, contentTypes.AsciiDoc.SubType, contentTypes.AsciiDoc.Suffixes()...); err != nil {
return nil, err
}
if err := add(rst.Provider, contentTypes.ReStructuredText.SubType, contentTypes.ReStructuredText.Suffixes()...); err != nil {
return nil, err
}
if err := add(pandoc.Provider, contentTypes.Pandoc.SubType, contentTypes.Pandoc.Suffixes()...); err != nil {
return nil, err
}
if err := add(org.Provider, contentTypes.EmacsOrgMode.SubType, contentTypes.EmacsOrgMode.Suffixes()...); err != nil {
return nil, err
}
if !defaultFound {
msg := "markup: Configured defaultMarkdownHandler %q not found."
if defaultHandler == "blackfriday" {
msg += " Did you mean to use goldmark? Blackfriday was removed in Hugo v0.100.0."
}
return nil, fmt.Errorf(msg, defaultHandler)
}
return &converterRegistry{
config: cfg,
converters: converters,
}, nil
}
type ConverterProvider interface {
Get(name string) converter.Provider
IsGoldmark(name string) bool
// Default() converter.Provider
GetMarkupConfig() markup_config.Config
GetHighlighter() highlight.Highlighter
}
type converterRegistry struct {
// Maps name (md, markdown, goldmark etc.) to a converter provider.
// Note that this is also used for aliasing, so the same converter
// may be registered multiple times.
// All names are lower case.
converters map[string]converter.Provider
config converter.ProviderConfig
}
func (r *converterRegistry) IsGoldmark(name string) bool {
cp := r.Get(name)
return cp != nil && cp.Name() == "goldmark"
}
func (r *converterRegistry) Get(name string) converter.Provider {
return r.converters[strings.ToLower(name)]
}
func (r *converterRegistry) GetHighlighter() highlight.Highlighter {
return r.config.Highlighter
}
func (r *converterRegistry) GetMarkupConfig() markup_config.Config {
return r.config.MarkupConfig()
}
func addConverter(m map[string]converter.Provider, c converter.Provider, aliases ...string) {
for _, alias := range aliases {
m[alias] = c
}
}
// ResolveMarkup returns the markup type.
func ResolveMarkup(s string) string {
s = strings.ToLower(s)
switch s {
case "goldmark":
return media.DefaultContentTypes.Markdown.SubType
case "asciidocext":
return media.DefaultContentTypes.AsciiDoc.SubType
default:
return s
}
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/blackfriday/anchors.go | markup/blackfriday/anchors.go | // Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package blackfriday holds some compatibility functions for the old Blackfriday v1 Markdown engine.
package blackfriday
import "unicode"
// SanitizedAnchorName is how Blackfriday sanitizes anchor names.
// Implementation borrowed from https://github.com/russross/blackfriday/blob/a477dd1646916742841ed20379f941cfa6c5bb6f/block.go#L1464
// Note that Hugo removed its Blackfriday support in v0.100.0, but you can still use this strategy for
// auto ID generation.
func SanitizedAnchorName(text string) string {
var anchorName []rune
futureDash := false
for _, r := range text {
switch {
case unicode.IsLetter(r) || unicode.IsNumber(r):
if futureDash && len(anchorName) > 0 {
anchorName = append(anchorName, '-')
}
futureDash = false
anchorName = append(anchorName, unicode.ToLower(r))
default:
futureDash = true
}
}
return string(anchorName)
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/rst/convert_test.go | markup/rst/convert_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package rst
import (
"testing"
"github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/config/security"
"github.com/gohugoio/hugo/markup/converter"
qt "github.com/frankban/quicktest"
)
func TestConvert(t *testing.T) {
if !Supports() {
t.Skip("rst not installed")
}
c := qt.New(t)
sc := security.DefaultConfig
sc.Exec.Allow = security.MustNewWhitelist("rst", "python")
p, err := Provider.New(
converter.ProviderConfig{
Logger: loggers.NewDefault(),
Exec: hexec.New(sc, "", loggers.NewDefault()),
})
c.Assert(err, qt.IsNil)
conv, err := p.New(converter.DocumentContext{})
c.Assert(err, qt.IsNil)
b, err := conv.Convert(converter.RenderContext{Src: []byte("testContent")})
c.Assert(err, qt.IsNil)
c.Assert(string(b.Bytes()), qt.Equals, "<div class=\"document\">\n\n\n<p>testContent</p>\n</div>")
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/rst/convert.go | markup/rst/convert.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
// Package rst converts content to HTML using the RST external helper.
package rst
import (
"bytes"
"runtime"
"github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/htesting"
"github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/markup/converter"
"github.com/gohugoio/hugo/markup/internal"
)
// Provider is the package entry point.
var Provider converter.ProviderProvider = provider{}
type provider struct{}
func (p provider) New(cfg converter.ProviderConfig) (converter.Provider, error) {
return converter.NewProvider("rst", func(ctx converter.DocumentContext) (converter.Converter, error) {
return &rstConverter{
ctx: ctx,
cfg: cfg,
}, nil
}), nil
}
type rstConverter struct {
ctx converter.DocumentContext
cfg converter.ProviderConfig
}
func (c *rstConverter) Convert(ctx converter.RenderContext) (converter.ResultRender, error) {
b, err := c.getRstContent(ctx.Src, c.ctx)
if err != nil {
return nil, err
}
return converter.Bytes(b), nil
}
func (c *rstConverter) Supports(feature identity.Identity) bool {
return false
}
// getRstContent calls the Python script rst2html as an external helper
// to convert reStructuredText content to HTML.
func (c *rstConverter) getRstContent(src []byte, ctx converter.DocumentContext) ([]byte, error) {
logger := c.cfg.Logger
binaryName, binaryPath := getRstBinaryNameAndPath()
if binaryName == "" {
logger.Println("rst2html / rst2html.py not found in $PATH: Please install.\n",
" Leaving reStructuredText content unrendered.")
return src, nil
}
logger.Infoln("Rendering", ctx.DocumentName, "with", binaryName, "...")
var result []byte
var err error
// certain *nix based OSs wrap executables in scripted launchers
// invoking binaries on these OSs via python interpreter causes SyntaxError
// invoke directly so that shebangs work as expected
// handle Windows manually because it doesn't do shebangs
if runtime.GOOS == "windows" {
pythonBinary, _ := internal.GetPythonBinaryAndExecPath()
args := []string{binaryPath, "--leave-comments", "--initial-header-level=2"}
result, err = internal.ExternallyRenderContent(c.cfg, ctx, src, pythonBinary, args)
} else {
args := []string{"--leave-comments", "--initial-header-level=2"}
result, err = internal.ExternallyRenderContent(c.cfg, ctx, src, binaryName, args)
}
if err != nil {
return nil, err
}
// TODO(bep) check if rst2html has a body only option.
bodyStart := bytes.Index(result, []byte("<body>\n"))
if bodyStart < 0 {
bodyStart = -7 // compensate for length
}
bodyEnd := bytes.Index(result, []byte("\n</body>"))
if bodyEnd < 0 || bodyEnd >= len(result) {
bodyEnd = max(len(result)-1, 0)
}
return result[bodyStart+7 : bodyEnd], err
}
var rst2Binaries = []string{"rst2html", "rst2html.py"}
func getRstBinaryNameAndPath() (string, string) {
for _, candidate := range rst2Binaries {
if pth := hexec.LookPath(candidate); pth != "" {
return candidate, pth
}
}
return "", ""
}
// Supports returns whether rst is (or should be) installed on this computer.
func Supports() bool {
name, _ := getRstBinaryNameAndPath()
hasBin := name != ""
if htesting.SupportsAll() {
if !hasBin {
panic("rst not installed")
}
return true
}
return hasBin
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/tableofcontents/tableofcontents.go | markup/tableofcontents/tableofcontents.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tableofcontents
import (
"fmt"
"html/template"
"sort"
"strings"
"github.com/gohugoio/hugo/common/collections"
"github.com/spf13/cast"
)
// Empty is an empty ToC.
var Empty = &Fragments{
Headings: Headings{},
HeadingsMap: map[string]*Heading{},
}
// Builder is used to build the ToC data structure.
type Builder struct {
identifiersSet bool
toc *Fragments
}
// AddAt adds the heading to the ToC.
func (b *Builder) AddAt(h *Heading, row, level int) {
if b.toc == nil {
b.toc = &Fragments{}
}
b.toc.addAt(h, row, level)
}
// SetIdentifiers sets the identifiers in the ToC.
func (b *Builder) SetIdentifiers(ids []string) {
if b.toc == nil {
b.toc = &Fragments{}
}
b.identifiersSet = true
sort.Strings(ids)
b.toc.Identifiers = ids
}
// Build returns the ToC.
func (b Builder) Build() *Fragments {
if b.toc == nil {
return Empty
}
b.toc.HeadingsMap = make(map[string]*Heading)
b.toc.walk(func(h *Heading) {
if h.ID != "" {
b.toc.HeadingsMap[h.ID] = h
if !b.identifiersSet {
b.toc.Identifiers = append(b.toc.Identifiers, h.ID)
}
}
})
sort.Strings(b.toc.Identifiers)
return b.toc
}
// Headings holds the top level headings.
type Headings []*Heading
// FilterBy returns a new Headings slice with all headings that matches the given predicate.
// For internal use only.
func (h Headings) FilterBy(fn func(*Heading) bool) Headings {
var out Headings
for _, h := range h {
h.walk(func(h *Heading) {
if fn(h) {
out = append(out, h)
}
})
}
return out
}
// Heading holds the data about a heading and its children.
type Heading struct {
ID string
Level int
Title string
Headings Headings
}
// IsZero is true when no ID or Text is set.
func (h Heading) IsZero() bool {
return h.ID == "" && h.Title == ""
}
func (h *Heading) walk(fn func(*Heading)) {
fn(h)
for _, h := range h.Headings {
h.walk(fn)
}
}
// Fragments holds the table of contents for a page.
type Fragments struct {
// Headings holds the top level headings.
Headings Headings
// Identifiers holds all the identifiers in the ToC as a sorted slice.
// Note that collections.SortedStringSlice has both a Contains and Count method
// that can be used to identify missing and duplicate IDs.
Identifiers collections.SortedStringSlice
// HeadingsMap holds all the headings in the ToC as a map.
// Note that with duplicate IDs, the last one will win.
HeadingsMap map[string]*Heading
}
// addAt adds the heading into the given location.
func (toc *Fragments) addAt(h *Heading, row, level int) {
for i := len(toc.Headings); i <= row; i++ {
toc.Headings = append(toc.Headings, &Heading{})
}
if level == 0 {
toc.Headings[row] = h
return
}
heading := toc.Headings[row]
for i := 1; i < level; i++ {
if len(heading.Headings) == 0 {
heading.Headings = append(heading.Headings, &Heading{})
}
heading = heading.Headings[len(heading.Headings)-1]
}
heading.Headings = append(heading.Headings, h)
}
// ToHTML renders the ToC as HTML.
func (toc *Fragments) ToHTML(startLevel, stopLevel any, ordered bool) (template.HTML, error) {
if toc == nil {
return "", nil
}
iStartLevel, err := cast.ToIntE(startLevel)
if err != nil {
return "", fmt.Errorf("startLevel: %w", err)
}
iStopLevel, err := cast.ToIntE(stopLevel)
if err != nil {
return "", fmt.Errorf("stopLevel: %w", err)
}
b := &tocBuilder{
s: strings.Builder{},
h: toc.Headings,
startLevel: iStartLevel,
stopLevel: iStopLevel,
ordered: ordered,
}
b.Build()
return template.HTML(b.s.String()), nil
}
func (toc Fragments) walk(fn func(*Heading)) {
for _, h := range toc.Headings {
h.walk(fn)
}
}
type tocBuilder struct {
s strings.Builder
h Headings
startLevel int
stopLevel int
ordered bool
}
func (b *tocBuilder) Build() {
b.writeNav(b.h)
}
func (b *tocBuilder) writeNav(h Headings) {
b.s.WriteString("<nav id=\"TableOfContents\">")
b.writeHeadings(1, 0, b.h)
b.s.WriteString("</nav>")
}
// writeHeadings renders the given headings at the current level, recursing
// into children one level down. Levels below startLevel emit no markup but
// their descendants are still visited; levels above stopLevel (unless
// stopLevel is -1) are cut off entirely.
func (b *tocBuilder) writeHeadings(level, indent int, headings Headings) {
	// Not yet at the first renderable level: descend silently.
	if level < b.startLevel {
		for _, heading := range headings {
			b.writeHeadings(level+1, indent, heading.Headings)
		}
		return
	}

	// Past the last renderable level (unless unbounded).
	if b.stopLevel != -1 && level > b.stopLevel {
		return
	}

	wrapInList := len(headings) > 0

	if wrapInList {
		b.s.WriteString("\n")
		b.indent(indent + 1)
		if b.ordered {
			b.s.WriteString("<ol>\n")
		} else {
			b.s.WriteString("<ul>\n")
		}
	}

	for _, heading := range headings {
		b.writeHeading(level+1, indent+2, heading)
	}

	if wrapInList {
		b.indent(indent + 1)
		if b.ordered {
			b.s.WriteString("</ol>")
		} else {
			b.s.WriteString("</ul>")
		}
		b.s.WriteString("\n")
		b.indent(indent)
	}
}
// writeHeading renders one <li> entry linking to the heading's ID, then
// recurses into its children. Empty placeholder headings (see addAt) get an
// empty <li> with no anchor.
// NOTE(review): h.ID and h.Title are written unescaped — presumably they are
// already-rendered HTML from the markup pipeline; confirm at call sites.
func (b *tocBuilder) writeHeading(level, indent int, h *Heading) {
	b.indent(indent)
	b.s.WriteString("<li>")
	if !h.IsZero() {
		b.s.WriteString("<a href=\"#")
		b.s.WriteString(h.ID)
		b.s.WriteString("\">")
		b.s.WriteString(h.Title)
		b.s.WriteString("</a>")
	}
	b.writeHeadings(level, indent, h.Headings)
	b.s.WriteString("</li>\n")
}
// indent writes n two-space indentation steps.
func (b *tocBuilder) indent(n int) {
	b.s.WriteString(strings.Repeat("  ", n))
}
// DefaultConfig is the default ToC configuration:
// headings h2 through h3, rendered as an unordered list.
var DefaultConfig = Config{
	StartLevel: 2,
	EndLevel:   3,
	Ordered:    false,
}
// Config holds the table-of-contents configuration options.
type Config struct {
	// Heading start level to include in the table of contents, starting
	// at h1 (inclusive).
	// <docsmeta>{ "identifiers": ["h1"] }</docsmeta>
	StartLevel int

	// Heading end level, inclusive, to include in the table of contents.
	// Default is 3, a value of -1 will include everything.
	EndLevel int

	// Whether to produce an ordered list or not.
	Ordered bool
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
gohugoio/hugo | https://github.com/gohugoio/hugo/blob/5ea3e13db6e436904ee8154bba77af8247b7e534/markup/tableofcontents/tableofcontents_test.go | markup/tableofcontents/tableofcontents_test.go | // Copyright 2019 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package tableofcontents
import (
"testing"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/collections"
)
// newTestTocBuilder builds the shared two-root test fixture. Note that
// "1-H3-1" deliberately reuses the ID "1-h2-2" so tests can exercise
// duplicate-ID handling.
var newTestTocBuilder = func() Builder {
	var b Builder
	fixtures := []struct {
		title, id  string
		row, level int
	}{
		{"Heading 1", "h1-1", 0, 0},
		{"1-H2-1", "1-h2-1", 0, 1},
		{"1-H2-2", "1-h2-2", 0, 1},
		{"1-H3-1", "1-h2-2", 0, 2}, // duplicate ID on purpose
		{"Heading 2", "h1-2", 1, 0},
	}
	for _, f := range fixtures {
		b.AddAt(&Heading{Title: f.title, ID: f.id}, f.row, f.level)
	}
	return b
}
// newTestToc returns the built Fragments for the shared test fixture.
var newTestToc = func() *Fragments {
	return newTestTocBuilder().Build()
}
// TestToc verifies ToHTML output across start/stop level combinations and
// ordered vs. unordered rendering, using a two-root ToC with nested children
// (one child reuses the ID "1-h2-2" to cover duplicate IDs).
func TestToc(t *testing.T) {
	c := qt.New(t)

	toc := &Fragments{}

	toc.addAt(&Heading{Title: "Heading 1", ID: "h1-1"}, 0, 0)
	toc.addAt(&Heading{Title: "1-H2-1", ID: "1-h2-1"}, 0, 1)
	toc.addAt(&Heading{Title: "1-H2-2", ID: "1-h2-2"}, 0, 1)
	toc.addAt(&Heading{Title: "1-H3-1", ID: "1-h2-2"}, 0, 2) // duplicate ID on purpose
	toc.addAt(&Heading{Title: "Heading 2", ID: "h1-2"}, 1, 0)

	// All levels, unordered list.
	tocHTML, _ := toc.ToHTML(1, -1, false)
	got := string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li><a href="#h1-1">Heading 1</a>
      <ul>
        <li><a href="#1-h2-1">1-H2-1</a></li>
        <li><a href="#1-h2-2">1-H2-2</a>
          <ul>
            <li><a href="#1-h2-2">1-H3-1</a></li>
          </ul>
        </li>
      </ul>
    </li>
    <li><a href="#h1-2">Heading 2</a></li>
  </ul>
</nav>`, qt.Commentf(got))

	// Top level only: children are cut off by stopLevel.
	tocHTML, _ = toc.ToHTML(1, 1, false)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li><a href="#h1-1">Heading 1</a></li>
    <li><a href="#h1-2">Heading 2</a></li>
  </ul>
</nav>`, qt.Commentf(got))

	// Levels 1-2: the level-3 heading is excluded.
	tocHTML, _ = toc.ToHTML(1, 2, false)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li><a href="#h1-1">Heading 1</a>
      <ul>
        <li><a href="#1-h2-1">1-H2-1</a></li>
        <li><a href="#1-h2-2">1-H2-2</a></li>
      </ul>
    </li>
    <li><a href="#h1-2">Heading 2</a></li>
  </ul>
</nav>`, qt.Commentf(got))

	// Level 2 only: top-level headings are skipped, their children rendered.
	tocHTML, _ = toc.ToHTML(2, 2, false)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li><a href="#1-h2-1">1-H2-1</a></li>
    <li><a href="#1-h2-2">1-H2-2</a></li>
  </ul>
</nav>`, qt.Commentf(got))

	// All levels again, but as an ordered list (<ol>).
	tocHTML, _ = toc.ToHTML(1, -1, true)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ol>
    <li><a href="#h1-1">Heading 1</a>
      <ol>
        <li><a href="#1-h2-1">1-H2-1</a></li>
        <li><a href="#1-h2-2">1-H2-2</a>
          <ol>
            <li><a href="#1-h2-2">1-H3-1</a></li>
          </ol>
        </li>
      </ol>
    </li>
    <li><a href="#h1-2">Heading 2</a></li>
  </ol>
</nav>`, qt.Commentf(got))
}
// TestTocMissingParent verifies that headings added at a depth with no
// explicit parent get empty placeholder <li> ancestors (addAt fills the
// missing rows/levels with zero-value headings).
func TestTocMissingParent(t *testing.T) {
	c := qt.New(t)

	toc := &Fragments{}

	// Row 0 gets an H2 with no H1 parent; row 1 gets two H3s with neither
	// an H1 nor an H2 parent.
	toc.addAt(&Heading{Title: "H2", ID: "h2"}, 0, 1)
	toc.addAt(&Heading{Title: "H3", ID: "h3"}, 1, 2)
	toc.addAt(&Heading{Title: "H3", ID: "h3"}, 1, 2)

	// Placeholder ancestors render as empty <li> wrappers.
	tocHTML, _ := toc.ToHTML(1, -1, false)
	got := string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li>
      <ul>
        <li><a href="#h2">H2</a></li>
      </ul>
    </li>
    <li>
      <ul>
        <li>
          <ul>
            <li><a href="#h3">H3</a></li>
            <li><a href="#h3">H3</a></li>
          </ul>
        </li>
      </ul>
    </li>
  </ul>
</nav>`, qt.Commentf(got))

	// Level 3 only: placeholders at shallower levels emit no markup.
	tocHTML, _ = toc.ToHTML(3, 3, false)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ul>
    <li><a href="#h3">H3</a></li>
    <li><a href="#h3">H3</a></li>
  </ul>
</nav>`, qt.Commentf(got))

	// Same structure as the first case, rendered as ordered lists.
	tocHTML, _ = toc.ToHTML(1, -1, true)
	got = string(tocHTML)
	c.Assert(got, qt.Equals, `<nav id="TableOfContents">
  <ol>
    <li>
      <ol>
        <li><a href="#h2">H2</a></li>
      </ol>
    </li>
    <li>
      <ol>
        <li>
          <ol>
            <li><a href="#h3">H3</a></li>
            <li><a href="#h3">H3</a></li>
          </ol>
        </li>
      </ol>
    </li>
  </ol>
</nav>`, qt.Commentf(got))
}
// TestTocMisc covers the derived Fragments fields: the sorted Identifiers
// slice (duplicates retained) and the ID-keyed HeadingsMap.
func TestTocMisc(t *testing.T) {
	c := qt.New(t)

	c.Run("Identifiers", func(c *qt.C) {
		toc := newTestToc()
		want := collections.SortedStringSlice{"1-h2-1", "1-h2-2", "1-h2-2", "h1-1", "h1-2"}
		c.Assert(toc.Identifiers, qt.DeepEquals, want)
	})

	c.Run("HeadingsMap", func(c *qt.C) {
		headings := newTestToc().HeadingsMap
		c.Assert(headings["h1-1"].Title, qt.Equals, "Heading 1")
		c.Assert(headings["doesnot exist"], qt.IsNil)
	})
}
// BenchmarkToc measures building a ToC from a Builder and rendering it to
// HTML. Note that some of these cannot use b.Loop() because of
// golang/go#27217.
func BenchmarkToc(b *testing.B) {
	// newTocs pre-builds n independent ToCs so ToHTML timing excludes setup.
	newTocs := func(n int) []*Fragments {
		var tocs []*Fragments
		for range n {
			tocs = append(tocs, newTestToc())
		}
		return tocs
	}

	b.Run("Build", func(b *testing.B) {
		var builders []Builder
		for i := 0; i < b.N; i++ {
			builders = append(builders, newTestTocBuilder())
		}
		b.ResetTimer()
		for i := 0; i < b.N; i++ {
			// Renamed from "b" to avoid shadowing the *testing.B receiver.
			builder := builders[i]
			builder.Build()
		}
	})

	b.Run("ToHTML", func(b *testing.B) {
		const size = 1000
		tocs := newTocs(size)
		for i := 0; b.Loop(); i++ {
			toc := tocs[i%size]
			toc.ToHTML(1, -1, false)
		}
	})
}
| go | Apache-2.0 | 5ea3e13db6e436904ee8154bba77af8247b7e534 | 2026-01-07T08:35:43.452707Z | false |
Subsets and Splits
No community queries yet
The top public SQL queries from the community will appear here once available.