all: Run modernize -fix ./...

Bjørn Erik Pedersen 2025-02-26 10:15:04 +01:00
parent b7ae24b9c2
commit 521911a576
141 changed files with 302 additions and 354 deletions
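
The diff below was produced by the gopls "modernize" analyzer, which rewrites pre-Go 1.21/1.22 idioms (index-based counting loops, interface{}, hand-rolled min/max, contains and copy helpers) into their modern standard-library equivalents. A sketch of the invocation, assuming the standalone driver documented for golang.org/x/tools (the exact module path can vary between releases):

    go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...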

@@ -191,16 +191,16 @@ func TestPanicInCreate(t *testing.T) {
 		return err
 	}
-	for i := 0; i < 3; i++ {
-		for j := 0; j < 3; j++ {
+	for i := range 3 {
+		for range 3 {
 			c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i))
 			c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i))
 		}
 	}
 	// Test the same keys again without the panic.
-	for i := 0; i < 3; i++ {
-		for j := 0; j < 3; j++ {
+	for i := range 3 {
+		for range 3 {
 			v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) {
 				return testItem{
 					name: key,
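
The bulk of this commit is Go 1.22's range-over-int rewrite: for i := range n counts from 0 to n-1, and when the index is unused the variable disappears entirely (for range n). A standalone sketch, not from this repo:

    package main

    import "fmt"

    func main() {
        // Before: for i := 0; i < 3; i++ { ... }
        for i := range 3 {
            fmt.Println("i =", i)
        }
        // Before: for j := 0; j < 3; j++ { ... } with j unused in the body.
        for range 3 {
            fmt.Println("tick")
        }
    }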

@@ -59,7 +59,7 @@ dir = ":resourceDir/_gen"
 	caches, err := filecache.NewCaches(p)
 	c.Assert(err, qt.IsNil)
 	cache := caches[name]
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		cache.GetOrCreateBytes(id, func() ([]byte, error) {
 			return []byte("abc"), nil
@@ -74,7 +74,7 @@ dir = ":resourceDir/_gen"
 	c.Assert(err, qt.IsNil)
 	c.Assert(count, qt.Equals, 5, msg)
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		v := cache.GetString(id)
 		if i < 5 {
@@ -97,7 +97,7 @@ dir = ":resourceDir/_gen"
 	c.Assert(count, qt.Equals, 4)
 	// Now only the i5 should be left.
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		v := cache.GetString(id)
 		if i != 5 {

@@ -105,7 +105,7 @@ dir = ":cacheDir/c"
 	}
 	for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
-		for i := 0; i < 2; i++ {
+		for range 2 {
 			info, r, err := ca.GetOrCreate("a", rf("abc"))
 			c.Assert(err, qt.IsNil)
 			c.Assert(r, qt.Not(qt.IsNil))
@@ -193,11 +193,11 @@ dir = "/cache/c"
 	var wg sync.WaitGroup
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		wg.Add(1)
 		go func(i int) {
 			defer wg.Done()
-			for j := 0; j < 20; j++ {
+			for range 20 {
 				ca := caches.Get(cacheName)
 				c.Assert(ca, qt.Not(qt.IsNil))
 				filename, data := filenameData(i)

@@ -26,6 +26,7 @@ import (
 	"path/filepath"
 	"reflect"
 	"regexp"
+	"slices"
 	"sort"
 	"strings"
 	"sync"
@@ -102,7 +103,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 	}
 	for _, t := range include {
-		for i := 0; i < t.NumMethod(); i++ {
+		for i := range t.NumMethod() {
 			m := t.Method(i)
 			if excludes[m.Name] || seen[m.Name] {
@@ -122,7 +123,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 			method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
-			for i := 0; i < numIn; i++ {
+			for i := range numIn {
 				in := m.Type.In(i)
 				name, pkg := nameAndPackage(in)
@@ -137,7 +138,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 			numOut := m.Type.NumOut()
 			if numOut > 0 {
-				for i := 0; i < numOut; i++ {
+				for i := range numOut {
 					out := m.Type.Out(i)
 					name, pkg := nameAndPackage(out)
@@ -304,7 +305,7 @@ func (m Method) inOutStr() string {
 	}
 	args := make([]string, len(m.In))
-	for i := 0; i < len(args); i++ {
+	for i := range args {
 		args[i] = fmt.Sprintf("arg%d", i)
 	}
 	return "(" + strings.Join(args, ", ") + ")"
@@ -316,7 +317,7 @@ func (m Method) inStr() string {
 	}
 	args := make([]string, len(m.In))
-	for i := 0; i < len(args); i++ {
+	for i := range args {
 		args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
 	}
 	return "(" + strings.Join(args, ", ") + ")"
@@ -339,7 +340,7 @@ func (m Method) outStrNamed() string {
 	}
 	outs := make([]string, len(m.Out))
-	for i := 0; i < len(outs); i++ {
+	for i := range outs {
 		outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i])
 	}
@@ -435,7 +436,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
 	// Exclude self
 	for i, pkgImp := range pkgImports {
 		if pkgImp == pkgPath {
-			pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
+			pkgImports = slices.Delete(pkgImports, i, i+1)
 		}
 	}
 }
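
slices.Delete (Go 1.21) is the stdlib replacement for the append-splice idiom: it removes s[i:j] and returns the shortened slice. A minimal sketch, not from this repo:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        s := []string{"a", "b", "c"}
        s = slices.Delete(s, 1, 2) // same contents as append(s[:1], s[2:]...)
        fmt.Println(s)             // [a c]
    }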

@@ -101,8 +101,8 @@ type configKey struct {
 // This is the root command.
 type rootCommand struct {
-	Printf  func(format string, v ...interface{})
-	Println func(a ...interface{})
+	Printf  func(format string, v ...any)
+	Println func(a ...any)
 	StdOut io.Writer
 	StdErr io.Writer
@@ -431,12 +431,12 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
 	// Used by mkcert (server).
 	log.SetOutput(r.StdOut)
-	r.Printf = func(format string, v ...interface{}) {
+	r.Printf = func(format string, v ...any) {
 		if !r.quiet {
 			fmt.Fprintf(r.StdOut, format, v...)
 		}
 	}
-	r.Println = func(a ...interface{}) {
+	r.Println = func(a ...any) {
 		if !r.quiet {
 			fmt.Fprintln(r.StdOut, a...)
 		}
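
any has been a predeclared alias for interface{} since Go 1.18, so these rewrites are purely textual and the two spellings remain interchangeable everywhere. A small sketch, not from this repo:

    package main

    import "fmt"

    // printAll accepts values of any type; ...any is the modern spelling of ...interface{}.
    func printAll(vals ...any) {
        for _, v := range vals {
            fmt.Println(v)
        }
    }

    func main() {
        printAll(1, "two", 3.0)
    }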

@@ -90,7 +90,7 @@ func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, arg
 		os.Stdout.Write(buf.Bytes())
 	default:
 		// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
-		var m map[string]interface{}
+		var m map[string]any
 		if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
 			return err
 		}

@@ -222,7 +222,7 @@ url: %s
 	}
 	// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
-	var m map[string]interface{}
+	var m map[string]any
 	if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
 		return err
 	}

@@ -65,6 +65,7 @@ import (
 	"github.com/spf13/fsync"
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/sync/semaphore"
+	"maps"
 )
 var (
@@ -195,9 +196,7 @@ func (f *fileChangeDetector) PrepareNew() {
 	}
 	f.prev = make(map[string]uint64)
-	for k, v := range f.current {
-		f.prev[k] = v
-	}
+	maps.Copy(f.prev, f.current)
 	f.current = make(map[string]uint64)
 }
@@ -759,7 +758,7 @@ func (c *serverCommand) createServerPorts(cd *simplecobra.Commandeer) error {
 		c.serverPorts = make([]serverPortListener, len(conf.configs.Languages))
 	}
 	currentServerPort := c.serverPort
-	for i := 0; i < len(c.serverPorts); i++ {
+	for i := range c.serverPorts {
 		l, err := net.Listen("tcp", net.JoinHostPort(c.serverInterface, strconv.Itoa(currentServerPort)))
 		if err == nil {
 			c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort}
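
maps.Copy (Go 1.21) replaces the hand-written key/value copy loop: it inserts every pair from the source map into the destination, overwriting keys that already exist. A sketch, not from this repo:

    package main

    import (
        "fmt"
        "maps"
    )

    func main() {
        prev := make(map[string]uint64)
        current := map[string]uint64{"a.txt": 1, "b.txt": 2}
        maps.Copy(prev, current) // same as: for k, v := range current { prev[k] = v }
        fmt.Println(len(prev))   // 2
    }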

@@ -117,7 +117,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
 			tos = append(tos, nil)
 			continue
 		}
-		for i := 0; i < slice.Len(); i++ {
+		for i := range slice.Len() {
 			tos = append(tos, slice.Index(i).Interface())
 		}
 	}
@@ -128,7 +128,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
 func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) {
 	var tos []any
-	for i := 0; i < tov.Len(); i++ {
+	for i := range tov.Len() {
 		tos = append(tos, tov.Index(i).Interface())
 	}

@@ -13,6 +13,8 @@
 package collections

+import "slices"
+
 import "sync"

 // Stack is a simple LIFO stack that is safe for concurrent use.
@@ -73,7 +75,7 @@ func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T {
 	for i := len(s.items) - 1; i >= 0; i-- {
 		if predicate(s.items[i]) {
 			items = append(items, s.items[i])
-			s.items = append(s.items[:i], s.items[i+1:]...)
+			s.items = slices.Delete(s.items, i, i+1)
 		}
 	}
 	return items

@@ -37,12 +37,12 @@ func TestXxHashFromReaderPara(t *testing.T) {
 	c := qt.New(t)

 	var wg sync.WaitGroup
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		i := i
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
-			for j := 0; j < 100; j++ {
+			for j := range 100 {
 				s := strings.Repeat("Hello ", i+j+1*42)
 				r := strings.NewReader(s)
 				got, size, err := XXHashFromReader(r)
@@ -144,8 +144,8 @@ func BenchmarkHashString(b *testing.B) {
 }

 func BenchmarkHashMap(b *testing.B) {
-	m := map[string]interface{}{}
-	for i := 0; i < 1000; i++ {
+	m := map[string]any{}
+	for i := range 1000 {
 		m[fmt.Sprintf("key%d", i)] = i
 	}

@@ -152,10 +152,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
 	}

 	if ectx.Position.LineNumber > 0 {
-		low := ectx.Position.LineNumber - 3
-		if low < 0 {
-			low = 0
-		}
+		low := max(ectx.Position.LineNumber-3, 0)

 		if ectx.Position.LineNumber > 2 {
 			ectx.LinesPos = 2
@@ -163,10 +160,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
 			ectx.LinesPos = ectx.Position.LineNumber - 1
 		}

-		high := ectx.Position.LineNumber + 2
-		if high > len(lines) {
-			high = len(lines)
-		}
+		high := min(ectx.Position.LineNumber+2, len(lines))

 		ectx.Lines = lines[low:high]
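
min and max have been predeclared functions since Go 1.21, so each clamp-with-if block above collapses to a single expression. A sketch of the same clamping, with hypothetical values:

    package main

    import "fmt"

    func main() {
        lineNumber := 2
        lines := make([]string, 4)
        low := max(lineNumber-3, 0)           // never below 0
        high := min(lineNumber+2, len(lines)) // never past len(lines)
        fmt.Println(low, high)                // 0 4
    }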

@@ -245,7 +245,7 @@ func ToSliceAny(v any) ([]any, bool) {
 	vvv := reflect.ValueOf(v)
 	if vvv.Kind() == reflect.Slice {
 		out := make([]any, vvv.Len())
-		for i := 0; i < vvv.Len(); i++ {
+		for i := range vvv.Len() {
 			out[i] = vvv.Index(i).Interface()
 		}
 		return out, true

@@ -20,6 +20,7 @@ import (
 	"sync"

 	"github.com/gohugoio/hugo/compare"
+	"slices"
 )

 var _ compare.Eqer = StringEqualFold("")
@@ -50,12 +51,7 @@ func (s StringEqualFold) Eq(s2 any) bool {
 // EqualAny returns whether a string is equal to any of the given strings.
 func EqualAny(a string, b ...string) bool {
-	for _, s := range b {
-		if a == s {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(b, a)
 }

 // regexpCache represents a cache of regexp objects protected by a mutex.
@@ -103,12 +99,7 @@ func GetOrCompileRegexp(pattern string) (re *regexp.Regexp, err error) {
 // InSlice checks if a string is an element of a slice of strings
 // and returns a boolean value.
 func InSlice(arr []string, el string) bool {
-	for _, v := range arr {
-		if v == el {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(arr, el)
 }

 // InSlicEqualFold checks if a string is an element of a slice of strings
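
Both helpers are now one-liners over slices.Contains (Go 1.21), which performs the same linear scan with ==. A sketch, not from this repo:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        arr := []string{"a", "b", "c"}
        fmt.Println(slices.Contains(arr, "b")) // true
        fmt.Println(slices.Contains(arr, "x")) // false
    }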

@@ -46,7 +46,7 @@ func TestHasBytesWriter(t *testing.T) {
 		return strings.Repeat("ab cfo", r.Intn(33))
 	}

-	for i := 0; i < 22; i++ {
+	for range 22 {
 		h, w := neww()
 		fmt.Fprint(w, rndStr()+"abc __foobar"+rndStr())
 		c.Assert(h.Patterns[0].Match, qt.Equals, true)

@@ -416,10 +416,7 @@ func Deprecate(item, alternative string, version string) {
 // DeprecateLevelMin informs about a deprecation starting at the given version, but with a minimum log level.
 func DeprecateLevelMin(item, alternative string, version string, minLevel logg.Level) {
-	level := deprecationLogLevelFromVersion(version)
-	if level < minLevel {
-		level = minLevel
-	}
+	level := max(deprecationLogLevelFromVersion(version), minLevel)
 	DeprecateLevel(item, alternative, version, level)
 }

@@ -37,7 +37,7 @@ func TestLogDistinct(t *testing.T) {
 	l := loggers.New(opts)

-	for i := 0; i < 10; i++ {
+	for range 10 {
 		l.Errorln("error 1")
 		l.Errorln("error 2")
 		l.Warnln("warn 1")
@@ -137,7 +137,7 @@ func TestReset(t *testing.T) {
 	l := loggers.New(opts)

-	for i := 0; i < 3; i++ {
+	for range 3 {
 		l.Errorln("error 1")
 		l.Errorln("error 2")
 		l.Errorln("error 1")

@@ -15,6 +15,7 @@ package maps

 import (
 	"github.com/gohugoio/hugo/common/hashing"
+	"slices"
 )

 // Ordered is a map that can be iterated in the order of insertion.
@@ -64,7 +65,7 @@ func (m *Ordered[K, T]) Delete(key K) {
 	delete(m.values, key)
 	for i, k := range m.keys {
 		if k == key {
-			m.keys = append(m.keys[:i], m.keys[i+1:]...)
+			m.keys = slices.Delete(m.keys, i, i+1)
 			break
 		}
 	}

@@ -140,7 +140,7 @@ func TestScratchInParallel(t *testing.T) {
 	for i := 1; i <= 10; i++ {
 		wg.Add(1)
 		go func(j int) {
-			for k := 0; k < 10; k++ {
+			for k := range 10 {
 				newVal := int64(k + j)
 				_, err := scratch.Add(key, newVal)

@@ -42,7 +42,7 @@ func TestPara(t *testing.T) {
 	c.Run("Order", func(c *qt.C) {
 		n := 500
 		ints := make([]int, n)
-		for i := 0; i < n; i++ {
+		for i := range n {
 			ints[i] = i
 		}
@@ -51,7 +51,7 @@ func TestPara(t *testing.T) {
 		var result []int
 		var mu sync.Mutex
-		for i := 0; i < n; i++ {
+		for i := range n {
 			i := i
 			r.Run(func() error {
 				mu.Lock()
@@ -78,7 +78,7 @@ func TestPara(t *testing.T) {
 		var counter int64

-		for i := 0; i < n; i++ {
+		for range n {
 			r.Run(func() error {
 				atomic.AddInt64(&counter, 1)
 				time.Sleep(1 * time.Millisecond)

@@ -51,7 +51,7 @@ func Run[T any](ctx context.Context, cfg Config[T]) Group[T] {
 	// Buffered for performance.
 	ch := make(chan T, cfg.NumWorkers)

-	for i := 0; i < cfg.NumWorkers; i++ {
+	for range cfg.NumWorkers {
 		g.Go(func() error {
 			for {
 				select {

@@ -103,10 +103,7 @@ func (r *RunEvery) Add(name string, f Func) {
 		f.IntervalHigh = 20 * time.Second
 	}

-	start := f.IntervalHigh / 3
-	if start < f.IntervalLow {
-		start = f.IntervalLow
-	}
+	start := max(f.IntervalHigh/3, f.IntervalLow)
 	f.interval = start
 	f.last = time.Now()

@@ -69,7 +69,7 @@ func ToStringSlicePreserveStringE(v any) ([]string, error) {
 	switch vv.Kind() {
 	case reflect.Slice, reflect.Array:
 		result = make([]string, vv.Len())
-		for i := 0; i < vv.Len(); i++ {
+		for i := range vv.Len() {
 			s, err := cast.ToStringE(vv.Index(i).Interface())
 			if err != nil {
 				return nil, err

@@ -15,6 +15,7 @@
 package types

 import (
+	"slices"
 	"sync"
 )
@@ -45,7 +46,7 @@ func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] {
 	if len(q.set) == q.size {
 		// Full
 		delete(q.set, q.vals[0])
-		q.vals = append(q.vals[:0], q.vals[1:]...)
+		q.vals = slices.Delete(q.vals, 0, 1)
 	}
 	q.set[v] = true
 	q.vals = append(q.vals, v)

@@ -55,7 +55,7 @@ func TestEvictingStringQueueConcurrent(t *testing.T) {
 	queue := NewEvictingQueue[string](3)

-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()

@@ -59,7 +59,7 @@ func (k KeyValues) String() string {
 // KeyValues struct.
 func NewKeyValuesStrings(key string, values ...string) KeyValues {
 	iv := make([]any, len(values))
-	for i := 0; i < len(values); i++ {
+	for i := range values {
 		iv[i] = values[i]
 	}
 	return KeyValues{Key: key, Values: iv}

@@ -82,7 +82,7 @@ func init() {
 	}
 	configLanguageKeys = make(map[string]bool)
 	addKeys := func(v reflect.Value) {
-		for i := 0; i < v.NumField(); i++ {
+		for i := range v.NumField() {
 			name := strings.ToLower(v.Type().Field(i).Name)
 			if skip[name] {
 				continue

@@ -305,7 +305,7 @@ func (l configLoader) applyOsEnvOverrides(environ []string) error {
 			_, ok := allDecoderSetups[key]
 			if ok {
 				// A map.
-				if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]interface{}{}); err == nil {
+				if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]any{}); err == nil {
 					val = v
 				}
 			}

@@ -28,6 +28,7 @@ import (
 	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	"slices"
 )

 type BaseConfig struct {
@@ -128,7 +129,7 @@ func (w BuildStats) Enabled() bool {
 }

 func (b BuildConfig) clone() BuildConfig {
-	b.CacheBusters = append([]CacheBuster{}, b.CacheBusters...)
+	b.CacheBusters = slices.Clone(b.CacheBusters)
 	return b
 }
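
slices.Clone (Go 1.21) is the idiomatic spelling of the copy-via-append trick: it returns a new slice with the same elements (a shallow copy). A sketch, not from this repo:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        a := []int{1, 2, 3}
        b := slices.Clone(a) // same contents as append([]int{}, a...)
        b[0] = 99
        fmt.Println(a, b) // [1 2 3] [99 2 3]
    }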

@@ -166,7 +166,7 @@ func TestBuildConfigCacheBusters(t *testing.T) {
 func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) {
 	c := qt.New(t)
 	cfg := New()
-	cfg.Set("build", map[string]interface{}{
+	cfg.Set("build", map[string]any{
 		"cacheBusters": []map[string]string{
 			{
 				"source": "assets/watching/hugo_stats\\.json",

@@ -345,7 +345,7 @@ func (c *defaultConfigProvider) getNestedKeyAndMap(key string, create bool) (str
 		c.keyCache.Store(key, parts)
 	}
 	current := c.root
-	for i := 0; i < len(parts)-1; i++ {
+	for i := range len(parts) - 1 {
 		next, found := current[parts[i]]
 		if !found {
 			if create {

@@ -332,7 +332,7 @@ func TestDefaultConfigProvider(t *testing.T) {
 			return nil
 		}

-		for i := 0; i < 20; i++ {
+		for i := range 20 {
 			i := i
 			r.Run(func() error {
 				const v = 42

@@ -29,7 +29,7 @@ func TestNamespace(t *testing.T) {
 	// ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig)

 	ns, err := DecodeNamespace[[]*tstNsExt](
-		map[string]interface{}{"foo": "bar"},
+		map[string]any{"foo": "bar"},
 		func(v any) (*tstNsExt, any, error) {
 			t := &tstNsExt{}
 			m, err := maps.ToStringMapE(v)
@@ -42,7 +42,7 @@ func TestNamespace(t *testing.T) {
 	c.Assert(err, qt.IsNil)
 	c.Assert(ns, qt.Not(qt.IsNil))
-	c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]interface{}{"foo": "bar"})
+	c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]any{"foo": "bar"})
 	c.Assert(ns.SourceHash, qt.Equals, "1420f6c7782f7459")
 	c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"})
 	c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil))

@@ -73,7 +73,7 @@ func NewWhitelist(patterns ...string) (Whitelist, error) {
 	var patternsr []*regexp.Regexp

-	for i := 0; i < len(patterns); i++ {
+	for i := range patterns {
 		p := strings.TrimSpace(patterns[i])
 		if p == "" {
 			continue

@@ -129,7 +129,7 @@ site RegularPages: {{ len site.RegularPages }}
 `
-	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
+	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), fmt.Appendf(nil, contentFile, "index.md"), 0o755), qt.IsNil)
 	c.Assert(afero.WriteFile(mm, filepath.Join(defaultArchetypeDir, "index.md"), []byte("default archetype index.md"), 0o755), qt.IsNil)
 	c.Assert(initFs(mm), qt.IsNil)
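
fmt.Appendf (Go 1.19) formats straight into a byte slice, so []byte(fmt.Sprintf(...)) becomes fmt.Appendf(nil, ...) without the intermediate string allocation. A sketch, not from this repo:

    package main

    import "fmt"

    func main() {
        b := fmt.Appendf(nil, "content:%s:%d", "key", 1) // same bytes as []byte(fmt.Sprintf(...))
        fmt.Println(string(b))
    }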

@@ -109,10 +109,7 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
 	startOfTOC := bytes.Index(content, first)

-	peekEnd := len(content)
-	if peekEnd > 70+startOfTOC {
-		peekEnd = 70 + startOfTOC
-	}
+	peekEnd := min(len(content), 70+startOfTOC)

 	if startOfTOC < 0 {
 		return stripEmptyNav(content), toc

@@ -43,11 +43,7 @@ func Emojify(source []byte) []byte {
 		j := start + k

-		upper := j + emojiMaxSize
-		if upper > len(source) {
-			upper = len(source)
-		}
+		upper := min(j+emojiMaxSize, len(source))

 		endEmoji := bytes.Index(source[j+1:upper], emojiDelim)
 		nextWordDelim := bytes.Index(source[j:upper], emojiWordDelim)

@@ -63,7 +63,7 @@ func UniqueStrings(s []string) []string {
 	unique := make([]string, 0, len(s))
 	for i, val := range s {
 		var seen bool
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break
@@ -83,7 +83,7 @@ func UniqueStringsReuse(s []string) []string {
 	for i, val := range s {
 		var seen bool
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break

@@ -89,7 +89,7 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
 	var data [][]string

-	for i := 0; i < len(stats); i++ {
+	for i := range stats {
 		stat := stats[i]
 		names[i+1] = stat.Name

@@ -101,10 +101,10 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
 	v := config.New()
 	if multilingual {
 		v.Set("languages", map[string]any{
-			"fr": map[string]interface{}{
+			"fr": map[string]any{
 				"weight": 20,
 			},
-			"en": map[string]interface{}{
+			"en": map[string]any{
 				"weight": 10,
 			},
 		})
@@ -112,7 +112,7 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
 	} else {
 		v.Set("defaultContentLanguage", lang)
 		v.Set("languages", map[string]any{
-			lang: map[string]interface{}{
+			lang: map[string]any{
 				"weight": 10,
 			},
 		})
@@ -167,10 +167,10 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool,
 	v := config.New()
 	if multilingual {
 		v.Set("languages", map[string]any{
-			"fr": map[string]interface{}{
+			"fr": map[string]any{
 				"weight": 20,
 			},
-			"en": map[string]interface{}{
+			"en": map[string]any{
 				"weight": 10,
 			},
 		})
@@ -178,7 +178,7 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool,
 	} else {
 		v.Set("defaultContentLanguage", lang)
 		v.Set("languages", map[string]any{
-			lang: map[string]interface{}{
+			lang: map[string]any{
 				"weight": 10,
 			},
 		})

@@ -151,7 +151,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) {
 		structTypes(v.Elem(), m)
 	}
 	case reflect.Slice, reflect.Array:
-		for i := 0; i < v.Len(); i++ {
+		for i := range v.Len() {
 			structTypes(v.Index(i), m)
 		}
 	case reflect.Map:
@@ -160,7 +160,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) {
 	}
 	case reflect.Struct:
 		m[v.Type()] = struct{}{}
-		for i := 0; i < v.NumField(); i++ {
+		for i := range v.NumField() {
 			structTypes(v.Field(i), m)
 		}
 	}

@@ -93,7 +93,7 @@ func (m *FileMeta) Merge(from *FileMeta) {
 	dstv := reflect.Indirect(reflect.ValueOf(m))
 	srcv := reflect.Indirect(reflect.ValueOf(from))

-	for i := 0; i < dstv.NumField(); i++ {
+	for i := range dstv.NumField() {
 		v := dstv.Field(i)
 		if !v.CanSet() {
 			continue

@@ -214,7 +214,7 @@ func WalkFilesystems(fs afero.Fs, fn WalkFn) bool {
 			}
 		}
 	} else if cfs, ok := fs.(overlayfs.FilesystemIterator); ok {
-		for i := 0; i < cfs.NumFilesystems(); i++ {
+		for i := range cfs.NumFilesystems() {
 			if WalkFilesystems(cfs.Filesystem(i), fn) {
 				return true
 			}

@@ -166,7 +166,7 @@ func FilterGlobParts(a []string) []string {
 // HasGlobChar returns whether s contains any glob wildcards.
 func HasGlobChar(s string) bool {
-	for i := 0; i < len(s); i++ {
+	for i := range len(s) {
 		if syntax.Special(s[i]) {
 			return true
 		}

@@ -91,7 +91,7 @@ func TestWalkRootMappingFs(t *testing.T) {
 	p := para.New(4)
 	r, _ := p.Start(context.Background())

-	for i := 0; i < 8; i++ {
+	for range 8 {
 		r.Run(func() error {
 			_, err := collectPaths(bfs, "")
 			if err != nil {
@@ -153,7 +153,7 @@ func BenchmarkWalk(b *testing.B) {
 	fs := NewBaseFileDecorator(afero.NewMemMapFs())

 	writeFiles := func(dir string, numfiles int) {
-		for i := 0; i < numfiles; i++ {
+		for i := range numfiles {
 			filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i))
 			c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0o777), qt.IsNil)
 		}

@@ -871,7 +871,7 @@ Background: {{ .Params.background }}|
 {{ .Title }}|
 `
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		b := Test(t, files)
 		b.AssertFileContent("public/p1/index.html", "Background: yosemite.jpg")
 	}

@@ -793,7 +793,7 @@ Single.
 	files := strings.ReplaceAll(filesTemplate, "WEIGHT_EN", "2")
 	files = strings.ReplaceAll(files, "WEIGHT_SV", "1")

-	for i := 0; i < 20; i++ {
+	for range 20 {
 		cfg := config.New()
 		b, err := NewIntegrationTestBuilder(
 			IntegrationTestConfig{

@@ -323,7 +323,7 @@ R: {{ with $r }}{{ .Content }}{{ end }}|Len: {{ len $bundle.Resources }}|$
 `

-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := Test(t, files)
 		b.AssertFileContent("public/index.html", "R: Data 1.txt|", "Len: 1|")
 	}
@@ -435,14 +435,14 @@ func TestContentTreeReverseIndex(t *testing.T) {
 	pageReverseIndex := newContentTreeTreverseIndex(
 		func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) {
-			for i := 0; i < 10; i++ {
+			for i := range 10 {
 				key := fmt.Sprint(i)
 				set(key, &testContentNode{key: key})
 			}
 		},
 	)

-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		key := fmt.Sprint(i)
 		v := pageReverseIndex.Get(key)
 		c.Assert(v, qt.Not(qt.IsNil))
@@ -456,17 +456,17 @@ func TestContentTreeReverseIndexPara(t *testing.T) {
 	var wg sync.WaitGroup

-	for i := 0; i < 10; i++ {
+	for range 10 {
 		pageReverseIndex := newContentTreeTreverseIndex(
 			func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) {
-				for i := 0; i < 10; i++ {
+				for i := range 10 {
 					key := fmt.Sprint(i)
 					set(key, &testContentNode{key: key})
 				}
 			},
 		)

-		for j := 0; j < 10; j++ {
+		for j := range 10 {
 			wg.Add(1)
 			go func(i int) {
 				defer wg.Done()

@@ -193,7 +193,7 @@ func TestTreePara(t *testing.T) {
 		},
 	)

-	for i := 0; i < 8; i++ {
+	for i := range 8 {
 		i := i
 		r.Run(func() error {
 			a := &testValue{ID: "/a"}
@@ -289,7 +289,7 @@ func BenchmarkTreeInsert(b *testing.B) {
 			},
 		)

-		for i := 0; i < numElements; i++ {
+		for i := range numElements {
 			lang := rand.Intn(2)
 			tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true})
 		}
@@ -323,7 +323,7 @@ func BenchmarkWalk(b *testing.B) {
 			},
 		)

-		for i := 0; i < numElements; i++ {
+		for i := range numElements {
 			lang := rand.Intn(2)
 			tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true})
 		}
@@ -355,8 +355,8 @@ func BenchmarkWalk(b *testing.B) {
 			base := createTree()
 			b.ResetTimer()
 			for i := 0; i < b.N; i++ {
-				for d1 := 0; d1 < 1; d1++ {
-					for d2 := 0; d2 < 2; d2++ {
+				for d1 := range 1 {
+					for d2 := range 2 {
 						tree := base.Shape(d1, d2)
 						w := &doctree.NodeShiftTreeWalker[*testValue]{
 							Tree: tree,

@@ -363,7 +363,7 @@ func (r *NodeShiftTreeWalker[T]) Walk(ctx context.Context) error {
 	main := r.Tree

 	var err error
-	fnMain := func(s string, v interface{}) bool {
+	fnMain := func(s string, v any) bool {
 		if r.ShouldSkip(s) {
 			return false
 		}

@@ -34,7 +34,7 @@ func NewTreeShiftTree[T comparable](d, length int) *TreeShiftTree[T] {
 		panic("length must be > 0")
 	}
 	trees := make([]*SimpleTree[T], length)
-	for i := 0; i < length; i++ {
+	for i := range length {
 		trees[i] = NewSimpleTree[T]()
 	}
 	return &TreeShiftTree[T]{d: d, trees: trees}

@@ -634,7 +634,7 @@ func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesys
 	mounts := make([]mountsDescriptor, len(mods))

-	for i := 0; i < len(mods); i++ {
+	for i := range mods {
 		mod := mods[i]
 		dir := mod.Dir()

@@ -57,14 +57,14 @@ func TestNewBaseFs(t *testing.T) {
 			filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme))
 			filenameOverlap := filepath.Join(base, "f3.txt")
 			afs.Mkdir(base, 0o755)
-			content := []byte(fmt.Sprintf("content:%s:%s", theme, dir))
+			content := fmt.Appendf(nil, "content:%s:%s", theme, dir)
 			afero.WriteFile(afs, filenameTheme, content, 0o755)
 			afero.WriteFile(afs, filenameOverlap, content, 0o755)
 		}
 		// Write some files to the root of the theme
 		base := filepath.Join(workingDir, "themes", theme)
-		afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0o755)
-		afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0o755)
+		afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), fmt.Appendf(nil, "content:%s", theme), 0o755)
+		afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), fmt.Appendf(nil, "content:%s", theme), 0o755)
 	}

 	afero.WriteFile(afs, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0o755)
@@ -683,8 +683,8 @@ func setConfigAndWriteSomeFilesTo(fs afero.Fs, v config.Provider, key, val strin
 	workingDir := v.GetString("workingDir")
 	v.Set(key, val)
 	fs.Mkdir(val, 0o755)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1))
-		afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0o755)
+		afero.WriteFile(fs, filename, fmt.Appendf(nil, "content:%s:%d", key, i+1), 0o755)
 	}
 }

@@ -311,7 +311,7 @@ func (h *HugoSites) NumLogErrors() int {
 func (h *HugoSites) PrintProcessingStats(w io.Writer) {
 	stats := make([]*helpers.ProcessingStats, len(h.Sites))
-	for i := 0; i < len(h.Sites); i++ {
+	for i := range h.Sites {
 		stats[i] = h.Sites[i].PathSpec.ProcessingStats
 	}
 	helpers.ProcessingStatsTable(w, stats...)

@@ -707,7 +707,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
 	cp := p.pageOutput.pco
 	if cp == nil && p.reusePageOutputContent() {
 		// Look for content to reuse.
-		for i := 0; i < len(p.pageOutputs); i++ {
+		for i := range p.pageOutputs {
 			if i == idx {
 				continue
 			}

@@ -45,6 +45,7 @@ import (
 	"github.com/gohugoio/hugo/tpl"
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	maps0 "maps"
 )

 const (
@@ -696,9 +697,7 @@ func (c *cachedContentScope) contentToC(ctx context.Context) (contentTableOfCont
 		cp.otherOutputs.Set(cp2.po.p.pid, cp2)

 		// Merge content placeholders
-		for k, v := range ct2.contentPlaceholders {
-			ct.contentPlaceholders[k] = v
-		}
+		maps0.Copy(ct.contentPlaceholders, ct2.contentPlaceholders)

 		if p.s.conf.Internal.Watch {
 			for _, s := range cp2.po.p.m.content.shortcodeState.shortcodes {

@@ -690,7 +690,7 @@ bundle min min key: {{ $jsonMinMin.Key }}
 `)

-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b.Build(BuildCfg{})

@@ -47,8 +47,8 @@ func BenchmarkGetPage(b *testing.B) {
 		b.Fatal(err)
 	}

-	for i := 0; i < 10; i++ {
-		for j := 0; j < 100; j++ {
+	for i := range 10 {
+		for j := range 100 {
 			writeSource(b, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), "CONTENT")
 		}
 	}
@@ -91,8 +91,8 @@ func createGetPageRegularBenchmarkSite(t testing.TB) *Site {
 		return fmt.Sprintf(pageCollectionsPageTemplate, title)
 	}

-	for i := 0; i < 10; i++ {
-		for j := 0; j < 100; j++ {
+	for i := range 10 {
+		for j := range 100 {
 			content := pc(fmt.Sprintf("Title%d_%d", i, j))
 			writeSource(c, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
 		}
@@ -105,7 +105,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) {
 	c := qt.New(t)
 	s := createGetPageRegularBenchmarkSite(t)

-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i))
 		page, _ := s.getPage(nil, pp)
 		c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp))
@@ -192,8 +192,8 @@ func TestGetPage(t *testing.T) {
 		return fmt.Sprintf(pageCollectionsPageTemplate, title)
 	}

-	for i := 0; i < 10; i++ {
-		for j := 0; j < 10; j++ {
+	for i := range 10 {
+		for j := range 10 {
 			content := pc(fmt.Sprintf("Title%d_%d", i, j))
 			writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
 		}

@@ -42,7 +42,7 @@ func TestMergeLanguages(t *testing.T) {
 	c.Assert(len(frSite.RegularPages()), qt.Equals, 6)
 	c.Assert(len(nnSite.RegularPages()), qt.Equals, 12)

-	for i := 0; i < 2; i++ {
+	for range 2 {
 		mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
 		c.Assert(len(mergedNN), qt.Equals, 31)
 		for i := 1; i <= 31; i++ {
@@ -163,7 +163,7 @@ date: "2018-02-28"
 	// Add a bundles
 	j := 100
 	contentPairs = append(contentPairs, []string{"bundle/index.md", fmt.Sprintf(contentTemplate, j, j)}...)
-	for i := 0; i < 6; i++ {
+	for i := range 6 {
 		contentPairs = append(contentPairs, []string{fmt.Sprintf("bundle/pb%d.md", i), fmt.Sprintf(contentTemplate, i+j, i+j)}...)
 	}
 	contentPairs = append(contentPairs, []string{"bundle/index.nn.md", fmt.Sprintf(contentTemplate, j, j)}...)

@@ -40,7 +40,7 @@ contentDir = "content/nn"
 `
 	b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
 	var content []string
-	for i := 0; i < 9; i++ {
+	for i := range 9 {
 		for _, contentDir := range []string{"content/en", "content/nn"} {
 			content = append(content, fmt.Sprintf(contentDir+"/blog/page%d.md", i), fmt.Sprintf(`---
 title: Page %d
@@ -118,7 +118,7 @@ cascade:
 - JSON
 ---`)

-	for i := 0; i < 22; i++ {
+	for i := range 22 {
 		b.WithContent(fmt.Sprintf("p%d.md", i+1), fmt.Sprintf(`---
 title: "Page"
 weight: %d

@@ -124,7 +124,7 @@ func TestRebuildEditTextFileInLeafBundle(t *testing.T) {
 func TestRebuildEditTextFileInShortcode(t *testing.T) {
 	t.Parallel()
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := TestRunning(t, rebuildFilesSimple)
 		b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html",
 			"Text: Assets My Shortcode Text.")
@@ -138,7 +138,7 @@ func TestRebuildEditTextFileInShortcode(t *testing.T) {
 func TestRebuildEditTextFileInHook(t *testing.T) {
 	t.Parallel()
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := TestRunning(t, rebuildFilesSimple)
 		b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html",
 			"Text: Assets My Other Text.")
@@ -1545,7 +1545,7 @@ title: "P%d"
 P%d Content.
 `
-	for i := 0; i < count; i++ {
+	for i := range count {
 		files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i))
 	}

@@ -99,7 +99,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg }}{{ with .Err }}{{ with
 	b.Running()

-	for i := 0; i < 2; i++ {
+	for i := range 2 {
 		b.Logf("Test run %d", i)
 		b.Build(BuildCfg{})
@@ -200,7 +200,7 @@ func BenchmarkResourceChainPostProcess(b *testing.B) {
 	for i := 0; i < b.N; i++ {
 		b.StopTimer()
 		s := newTestSitesBuilder(b)
-		for i := 0; i < 300; i++ {
+		for i := range 300 {
 			s.WithContent(fmt.Sprintf("page%d.md", i+1), "---\ntitle: Page\n---")
 		}
 		s.WithTemplates("_default/single.html", `Start.

@@ -865,13 +865,13 @@ Content: {{ .Content }}|
 func TestShortcodeStableOutputFormatTemplates(t *testing.T) {
 	t.Parallel()

-	for i := 0; i < 5; i++ {
+	for range 5 {

 		b := newTestSitesBuilder(t)

 		const numPages = 10

-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.WithContent(fmt.Sprintf("page%d.md", i), `---
 title: "Page"
 outputs: ["html", "css", "csv", "json"]
@@ -894,14 +894,14 @@ outputs: ["html", "css", "csv", "json"]
 		// helpers.PrintFs(b.Fs.Destination, "public", os.Stdout)

-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", i), "Short-HTML")
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.csv", i), "Short-CSV")
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.json", i), "Short-HTML")
 		}

-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.AssertFileContent(fmt.Sprintf("public/page%d/styles.css", i), "Short-HTML")
 		}

@@ -330,10 +330,7 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
 func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) {
 	numWorkers := config.GetNumWorkerMultiplier()
-	numWorkersSite := numWorkers
-	if numWorkersSite > len(sites) {
-		numWorkersSite = len(sites)
-	}
+	numWorkersSite := min(numWorkers, len(sites))
 	workersSite := para.New(numWorkersSite)

 	h := &HugoSites{

@@ -78,7 +78,7 @@ func (s *Site) renderPages(ctx *siteRenderContext) error {
 	wg := &sync.WaitGroup{}

-	for i := 0; i < numWorkers; i++ {
+	for range numWorkers {
 		wg.Add(1)
 		go pageRenderer(ctx, s, pages, results, wg)
 	}

@@ -69,15 +69,15 @@ aliases: [/Ali%d]
 		"_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
 	)

-	for i := 0; i < 2; i++ {
-		for j := 0; j < 2; j++ {
+	for i := range 2 {
+		for j := range 2 {
 			pageID := i + j + 1
 			b.WithContent(fmt.Sprintf("content/sect/p%d.md", pageID),
 				fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID))
 		}
 	}

-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		b.WithContent(fmt.Sprintf("assets/image%d.png", i+1), "image")
 	}

@@ -372,14 +372,14 @@ func TestMainSections(t *testing.T) {
 		b := newTestSitesBuilder(c).WithViper(v)

-		for i := 0; i < 20; i++ {
+		for i := range 20 {
 			b.WithContent(fmt.Sprintf("page%d.md", i), `---
 title: "Page"
 ---
 `)
 		}

-		for i := 0; i < 5; i++ {
+		for i := range 5 {
 			b.WithContent(fmt.Sprintf("blog/page%d.md", i), `---
 title: "Page"
 tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
@@ -387,7 +387,7 @@ tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
 `)
 		}

-		for i := 0; i < 3; i++ {
+		for i := range 3 {
 			b.WithContent(fmt.Sprintf("docs/page%d.md", i), `---
 title: "Page"
 ---

@@ -97,7 +97,7 @@ Do not go gentle into that good night.
 	writeSource(t, fs, filepath.Join("content", "sect1", "_index.md"), fmt.Sprintf(st, "/ss1/"))
 	writeSource(t, fs, filepath.Join("content", "sect2", "_index.md"), fmt.Sprintf(st, "/ss2/"))

-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		writeSource(t, fs, filepath.Join("content", "sect1", fmt.Sprintf("p%d.md", i+1)), pt)
 		writeSource(t, fs, filepath.Join("content", "sect2", fmt.Sprintf("p%d.md", i+1)), pt)
 	}

@@ -314,7 +314,7 @@ func TestTaxonomiesNextGenLoops(t *testing.T) {
 </ul>
 `)

-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		b.WithContent(fmt.Sprintf("page%d.md", i+1), `
 ---
 Title: "Taxonomy!"

@@ -250,7 +250,7 @@ Content.
 Base %d: {{ block "main" . }}FOO{{ end }}
 `

-	for i := 0; i < numPages; i++ {
+	for i := range numPages {
 		id := i + 1
 		b.WithContent(fmt.Sprintf("page%d.md", id), fmt.Sprintf(pageTemplate, id, id))
 		b.WithTemplates(fmt.Sprintf("_default/layout%d.html", id), fmt.Sprintf(singleTemplate, id))
@@ -258,7 +258,7 @@ Base %d: {{ block "main" . }}FOO{{ end }}
 	}

 	b.Build(BuildCfg{})

-	for i := 0; i < numPages; i++ {
+	for i := range numPages {
 		id := i + 1
 		b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", id), fmt.Sprintf(`Base %d: %d`, id, id))
 	}

@@ -27,7 +27,7 @@ func NewFinder(cfg FinderConfig) *Finder {
 }

 var searchIDPool = sync.Pool{
-	New: func() interface{} {
+	New: func() any {
 		return &searchID{seen: make(map[Manager]bool)}
 	},
 }

@@ -25,7 +25,7 @@ import (
 func BenchmarkIdentityManager(b *testing.B) {
 	createIds := func(num int) []identity.Identity {
 		ids := make([]identity.Identity, num)
-		for i := 0; i < num; i++ {
+		for i := range num {
 			name := fmt.Sprintf("id%d", i)
 			ids[i] = &testIdentity{base: name, name: name}
 		}
@@ -108,10 +108,10 @@ func BenchmarkIsNotDependent(b *testing.B) {
 	newNestedManager := func(depth, count int) identity.Manager {
 		m1 := identity.NewManager("")
-		for i := 0; i < depth; i++ {
+		for range depth {
 			m2 := identity.NewManager("")
 			m1.AddIdentity(m2)
-			for j := 0; j < count; j++ {
+			for j := range count {
 				id := fmt.Sprintf("id%d", j)
 				m2.AddIdentity(&testIdentity{id, id, "", ""})
 			}

@@ -27,6 +27,7 @@ import (
 	"github.com/gohugoio/hugo/resources"
 	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/afero"
+	"slices"
 )

 const (
@@ -167,15 +168,11 @@ func createBuildPlugins(rs *resources.Spec, assetsResolver *fsResolver, depsMana
 				}
 			}

-			for _, ext := range opts.Externals {
-				// ESBuild will do a more thorough check for packages resolved in node_modules,
-				// but we need to make sure that we don't try to resolve these in the /assets folder.
-				if ext == impPath {
-					return api.OnResolveResult{
-						Path:     impPath,
-						External: true,
-					}, nil
-				}
-			}
+			if slices.Contains(opts.Externals, impPath) {
+				return api.OnResolveResult{
+					Path:     impPath,
+					External: true,
+				}, nil
+			}

 			if opts.ImportOnResolveFunc != nil {

@@ -384,7 +384,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) {
 	}

 	inOuts := make([]*inOut, opts.PoolSize)
-	for i := 0; i < opts.PoolSize; i++ {
+	for i := range opts.PoolSize {
 		var stdin, stdout hugio.ReadWriteCloser

 		stdin = hugio.NewPipeReadWriteCloser()
@@ -478,7 +478,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) {
 		close(dp.donec)
 	}()

-	for i := 0; i < len(inOuts); i++ {
+	for i := range inOuts {
 		d := &dispatcher[Q, R]{
 			pending: make(map[uint32]*call[Q, R]),
 			inOut:   inOuts[i],

@@ -101,7 +101,7 @@ func TestGreet(t *testing.T) {
 		Infof: t.Logf,
 	}

-	for i := 0; i < 2; i++ {
+	for range 2 {
 		func() {
 			d, err := Start[person, greeting](opts)
 			if err != nil {
@@ -123,7 +123,7 @@ func TestGreet(t *testing.T) {
 				},
 			}

-			for j := 0; j < 20; j++ {
+			for j := range 20 {
 				inputMessage.Header.ID = uint32(j + 1)
 				g, err := d.Execute(ctx, inputMessage)
 				if err != nil {
@@ -163,7 +163,7 @@ func TestGreetParallel(t *testing.T) {
 			ctx := context.Background()

-			for j := 0; j < 5; j++ {
+			for j := range 5 {
 				base := i * 100
 				id := uint32(base + j)
@@ -217,7 +217,7 @@ func TestKatexParallel(t *testing.T) {
 			ctx := context.Background()

-			for j := 0; j < 1; j++ {
+			for j := range 1 {
 				base := i * 100
 				id := uint32(base + j)

View file

@@ -29,13 +29,13 @@ func TestCollator(t *testing.T) {
 	coll := &Collator{c: collate.New(language.English, collate.Loose)}
 
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		wg.Add(1)
 		go func() {
 			coll.Lock()
 			defer coll.Unlock()
 			defer wg.Done()
-			for j := 0; j < 10; j++ {
+			for range 10 {
 				k := coll.CompareStrings("abc", "def")
 				c.Assert(k, qt.Equals, -1)
 			}
@@ -48,7 +48,7 @@ func BenchmarkCollator(b *testing.B) {
 	s := []string{"foo", "bar", "éntre", "baz", "qux", "quux", "corge", "grault", "garply", "waldo", "fred", "plugh", "xyzzy", "thud"}
 	doWork := func(coll *Collator) {
-		for i := 0; i < len(s); i++ {
+		for i := range s {
 			for j := i + 1; j < len(s); j++ {
 				_ = coll.CompareStrings(s[i], s[j])
 			}

View file

@@ -79,7 +79,7 @@ func TestInit(t *testing.T) {
 
 	// Add some concurrency and randomness to verify thread safety and
 	// init order.
-	for i := 0; i < 100; i++ {
+	for i := range 100 {
 		wg.Add(1)
 		go func(i int) {
 			defer wg.Done()

View file

@@ -77,7 +77,7 @@ func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.No
 	var buff bytes.Buffer
 	l := n.Lines().Len()
-	for i := 0; i < l; i++ {
+	for i := range l {
 		line := n.Lines().At(i)
 		buff.Write(line.Value(src))
 	}

View file

@@ -182,7 +182,7 @@ func (r *hugoContextRenderer) renderHTMLBlock(
 	if entering {
 		if r.Unsafe {
 			l := n.Lines().Len()
-			for i := 0; i < l; i++ {
+			for i := range l {
 				line := n.Lines().At(i)
 				linev := line.Value(source)
 				var stripped bool
@@ -226,7 +226,7 @@ func (r *hugoContextRenderer) renderRawHTML(
 	n := node.(*ast.RawHTML)
 	l := n.Segments.Len()
 	if r.Unsafe {
-		for i := 0; i < l; i++ {
+		for i := range l {
 			segment := n.Segments.At(i)
 			_, _ = w.Write(segment.Value(source))
 		}

View file

@@ -110,7 +110,7 @@ func (r *htmlRenderer) renderPassthroughBlock(w util.BufWriter, src []byte, node
 	case (*passthrough.PassthroughBlock):
 		l := nn.Lines().Len()
 		var buff bytes.Buffer
-		for i := 0; i < l; i++ {
+		for i := range l {
 			line := nn.Lines().At(i)
 			buff.Write(line.Value(src))
 		}

View file

@@ -100,10 +100,7 @@ func (c *rstConverter) getRstContent(src []byte, ctx converter.DocumentContext)
 	bodyEnd := bytes.Index(result, []byte("\n</body>"))
 	if bodyEnd < 0 || bodyEnd >= len(result) {
-		bodyEnd = len(result) - 1
-		if bodyEnd < 0 {
-			bodyEnd = 0
-		}
+		bodyEnd = max(len(result)-1, 0)
 	}
 
 	return result[bodyStart+7 : bodyEnd], err
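Note: min and max became predeclared builtins in Go 1.21, which is what lets the fixer fold the follow-up "if bodyEnd < 0" correction into a single clamp. A standalone sketch of the behavior (not the converter's code):

    package main

    import "fmt"

    func main() {
        result := []byte{} // empty body: len(result)-1 would be -1
        bodyEnd := max(len(result)-1, 0)
        fmt.Println(bodyEnd) // 0; for non-empty input it stays len(result)-1
    }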

View file

@@ -250,7 +250,7 @@ func (b *tocBuilder) writeHeading(level, indent int, h *Heading) {
 }
 
 func (b *tocBuilder) indent(n int) {
-	for i := 0; i < n; i++ {
+	for range n {
 		b.s.WriteString(" ")
 	}
 }

View file

@@ -196,7 +196,7 @@ func TestTocMisc(t *testing.T) {
 func BenchmarkToc(b *testing.B) {
 	newTocs := func(n int) []*Fragments {
 		var tocs []*Fragments
-		for i := 0; i < n; i++ {
+		for range n {
 			tocs = append(tocs, newTestToc())
 		}
 		return tocs

View file

@@ -26,6 +26,7 @@ import (
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	"slices"
 )
 
 // DefaultTypes is the default media types supported by Hugo.
@@ -46,7 +47,7 @@ func init() {
 	// Initialize the Builtin types with values from DefaultTypes.
 	v := reflect.ValueOf(&Builtin).Elem()
-	for i := 0; i < v.NumField(); i++ {
+	for i := range v.NumField() {
 		f := v.Field(i)
 		fieldName := v.Type().Field(i).Name
 		builtinType := f.Interface().(Type)
@@ -149,12 +150,7 @@ func (t ContentTypes) IsIndexContentFile(filename string) bool {
 // IsHTMLSuffix returns whether the given suffix is a HTML media type.
 func (t ContentTypes) IsHTMLSuffix(suffix string) bool {
-	for _, s := range t.HTML.Suffixes() {
-		if s == suffix {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(t.HTML.Suffixes(), suffix)
 }
 
 // Types is a slice of media types.

View file

@@ -380,14 +380,12 @@ func (c *Client) Verify(clean bool) error {
 	if err != nil {
 		if clean {
 			m := verifyErrorDirRe.FindAllStringSubmatch(err.Error(), -1)
-			if m != nil {
-				for i := 0; i < len(m); i++ {
-					c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1])
-					if err != nil {
-						return err
-					}
-					fmt.Println("Cleaned", c)
-				}
+			for i := range m {
+				c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1])
+				if err != nil {
+					return err
+				}
+				fmt.Println("Cleaned", c)
 			}
 			// Try to verify it again.
 			err = c.runVerify()

View file

@@ -25,6 +25,7 @@ import (
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	"slices"
 )
 
 var smc = newMenuCache()
@@ -267,7 +268,7 @@ func (m Menu) Reverse() Menu {
 // Clone clones the menu entries.
 // This is for internal use only.
 func (m Menu) Clone() Menu {
-	return append(Menu(nil), m...)
+	return slices.Clone(m)
 }
 
 func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus], error) {
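Note: slices.Clone (Go 1.21) is the named replacement for the append(S(nil), s...) copy idiom used by Menu.Clone here and by menuCache.getP in the next file. Both produce a shallow copy whose backing array is independent of the original. A quick sketch with a plain string slice:

    package main

    import (
        "fmt"
        "slices"
    )

    func main() {
        m := []string{"home", "blog", "about"}
        clone := slices.Clone(m) // same effect as append([]string(nil), m...)
        clone[0] = "start"
        fmt.Println(m[0], clone[0]) // home start
    }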

View file

@@ -14,6 +14,7 @@
 package navigation
 
 import (
+	"slices"
 	"sync"
 )
@@ -84,7 +85,7 @@ func (c *menuCache) getP(key string, apply func(m *Menu), menuLists ...Menu) (Me
 	}
 
 	m := menuLists[0]
-	menuCopy := append(Menu(nil), m...)
+	menuCopy := slices.Clone(m)
 	if apply != nil {
 		apply(&menuCopy)

View file

@@ -23,7 +23,7 @@ import (
 
 func createSortTestMenu(num int) Menu {
 	menu := make(Menu, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		m := &MenuEntry{}
 		menu[i] = m
 	}
@@ -49,11 +49,11 @@ func TestMenuCache(t *testing.T) {
 	var testMenuSets []Menu
 
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		testMenuSets = append(testMenuSets, createSortTestMenu(i+1))
 	}
 
-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()

View file

@@ -321,7 +321,7 @@ func uniqueStringsReuse(s []string) []string {
 	for i, val := range s {
 		var seen bool
 
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break

View file

@@ -99,7 +99,7 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 	if c.OmitEmpty {
 		// It's tricky to do this with a regexp, so convert it to a map, remove zero values and convert back.
-		var m map[string]interface{}
+		var m map[string]any
 		err = json.Unmarshal(converted, &m)
 		if err != nil {
 			return nil, err
@@ -111,9 +111,9 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 					delete(m, k)
 				} else {
 					switch vv := v.(type) {
-					case map[string]interface{}:
+					case map[string]any:
 						removeZeroVAlues(vv)
-					case []interface{}:
+					case []any:
 						for _, vvv := range vv {
 							if m, ok := vvv.(map[string]any); ok {
 								removeZeroVAlues(m)
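Note: the interface{} to any rewrites here, and in the sync.Pool and logf/logln hunks further down, are purely cosmetic: any has been a predeclared alias for interface{} since Go 1.18, so the two spellings denote the identical type and are interchangeable. For example:

    package main

    import "fmt"

    func main() {
        var a interface{} = 42
        var b any = a // no conversion needed: any is an alias, not a new type
        fmt.Println(b)
    }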

View file

@@ -123,7 +123,7 @@ LOOP:
 
 // Handle YAML or TOML front matter.
 func (l *pageLexer) lexFrontMatterSection(tp ItemType, delimr rune, name string, delim []byte) stateFunc {
-	for i := 0; i < 2; i++ {
+	for range 2 {
 		if r := l.next(); r != delimr {
 			return l.errorf("invalid %s delimiter", name)
 		}

View file

@@ -192,7 +192,7 @@ func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
 // Consume is a convenience method to consume the next n tokens,
 // but back off Errors and EOF.
 func (t *Iterator) Consume(cnt int) {
-	for i := 0; i < cnt; i++ {
+	for range cnt {
 		token := t.Next()
 		if token.Type == tError || token.Type == tEOF {
 			t.Backup()

View file

@@ -292,7 +292,7 @@ func (r *rank) addWeight(w int) {
 }
 
 var rankPool = sync.Pool{
-	New: func() interface{} {
+	New: func() any {
 		return &rank{}
 	},
 }
@@ -433,7 +433,7 @@ func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
 		keywords = append(keywords, cfg.stringToKeyword(vv))
 	case []string:
 		vvv := make([]Keyword, len(vv))
-		for i := 0; i < len(vvv); i++ {
+		for i := range vvv {
 			vvv[i] = cfg.stringToKeyword(vv[i])
 		}
 		keywords = append(keywords, vvv...)
@@ -623,7 +623,7 @@ type Keyword interface {
 func (cfg IndexConfig) StringsToKeywords(s ...string) []Keyword {
 	kw := make([]Keyword, len(s))
-	for i := 0; i < len(s); i++ {
+	for i := range s {
 		kw[i] = cfg.stringToKeyword(s[i])
 	}

View file

@@ -65,7 +65,7 @@ func (d *testDoc) addKeywords(name string, keywords ...string) *testDoc {
 	for k, v := range keywordm {
 		keywords := make([]Keyword, len(v))
-		for i := 0; i < len(v); i++ {
+		for i := range v {
 			keywords[i] = StringKeyword(v[i])
 		}
 		d.keywords[k] = keywords
@@ -221,7 +221,7 @@ func TestSearch(t *testing.T) {
 		doc := newTestDocWithDate("keywords", date, "a", "b")
 		doc.name = "thedoc"
 
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			docc := *doc
 			docc.name = fmt.Sprintf("doc%d", i)
 			idx.Add(context.Background(), &docc)
@@ -230,7 +230,7 @@ func TestSearch(t *testing.T) {
 		m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"keywords"}})
 		c.Assert(err, qt.IsNil)
 		c.Assert(len(m), qt.Equals, 10)
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			c.Assert(m[i].Name(), qt.Equals, fmt.Sprintf("doc%d", i))
 		}
 	})
@@ -311,11 +311,11 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
 	pages := make([]*testDoc, 100)
 	numkeywords := 30
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
 
-	for i := 0; i < len(pages); i++ {
+	for i := range pages {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {
@@ -356,7 +356,7 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
 		for i := 0; i < b.N; i++ {
 			idx := NewInvertedIndex(cfg)
 			docs := make([]Document, len(pages))
-			for i := 0; i < len(pages); i++ {
+			for i := range pages {
 				docs[i] = pages[i]
 			}
 			idx.Add(context.Background(), docs...)
@@ -372,7 +372,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
 	docs := make([]*testDoc, 1000)
 	numkeywords := 20
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
@@ -386,7 +386,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
 	idx := NewInvertedIndex(cfg)
 
-	for i := 0; i < len(docs); i++ {
+	for i := range docs {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {

View file

@@ -160,7 +160,7 @@ keywords: ['k%d']
 ---
 `
-	for i := 0; i < 32; i++ {
+	for range 32 {
 		base += fmt.Sprintf("\n## Title %d", rand.Intn(100))
 	}

View file

@@ -230,10 +230,10 @@ func git(args ...string) (string, error) {
 	return string(out), nil
 }
 
-func logf(format string, args ...interface{}) {
+func logf(format string, args ...any) {
 	fmt.Fprintf(os.Stderr, format, args...)
 }
 
-func logln(args ...interface{}) {
+func logln(args ...any) {
 	fmt.Fprintln(os.Stderr, args...)
 }

View file

@@ -348,13 +348,13 @@ func TestImageTransformConcurrent(t *testing.T) {
 	image := fetchImageForSpec(spec, c, "sunset.jpg")
 
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		wg.Add(1)
 		go func(id int) {
 			defer wg.Done()
-			for j := 0; j < 5; j++ {
+			for j := range 5 {
 				img := image
-				for k := 0; k < 2; k++ {
+				for k := range 2 {
 					r1, err := img.Resize(fmt.Sprintf("%dx", id-k))
 					if err != nil {
 						t.Error(err)
@@ -499,7 +499,7 @@ func BenchmarkImageExif(b *testing.B) {
 	b.StartTimer()
 	for i := 0; i < b.N; i++ {
-		for j := 0; j < 10; j++ {
+		for range 10 {
 			getAndCheckExif(c, images[i])
 		}
 	}

View file

@@ -22,6 +22,7 @@ import (
 	"strings"
 
 	"github.com/gohugoio/hugo/common/hstrings"
+	"slices"
 )
 
 type colorGoProvider interface {
@@ -91,11 +92,8 @@ func (c Color) toSRGB(i uint8) float64 {
 // that the palette is valid for the relevant format.
 func AddColorToPalette(c color.Color, p color.Palette) color.Palette {
 	var found bool
-	for _, cc := range p {
-		if c == cc {
-			found = true
-			break
-		}
-	}
+	if slices.Contains(p, c) {
+		found = true
 	}
 
 	if !found {

View file

@@ -209,7 +209,7 @@ func goldenEqual(img1, img2 *image.NRGBA) bool {
 	if len(img1.Pix) != len(img2.Pix) {
 		return false
 	}
-	for i := 0; i < len(img1.Pix); i++ {
+	for i := range img1.Pix {
 		diff := int(img1.Pix[i]) - int(img2.Pix[i])
 		if diff < 0 {
 			diff = -diff

View file

@@ -24,6 +24,7 @@ import (
 	"github.com/gohugoio/hugo/hugofs/glob"
 	"github.com/gohugoio/hugo/resources/kinds"
 	"github.com/mitchellh/mapstructure"
+	"slices"
 )
 
 // A PageMatcher can be used to match a Page with Glob patterns.
@@ -208,13 +209,7 @@ func decodePageMatcher(m any, v *PageMatcher) error {
 	v.Kind = strings.ToLower(v.Kind)
 	if v.Kind != "" {
 		g, _ := glob.GetGlob(v.Kind)
-		found := false
-		for _, k := range kinds.AllKindsInPages {
-			if g.Match(k) {
-				found = true
-				break
-			}
-		}
+		found := slices.ContainsFunc(kinds.AllKindsInPages, g.Match)
 		if !found {
 			return fmt.Errorf("%q did not match a valid Page Kind", v.Kind)
 		}
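Note: slices.ContainsFunc (Go 1.21) generalizes slices.Contains to an arbitrary predicate, which is why the glob's Match method can be passed directly in place of the found/break loop above. A sketch with stand-in values (not Hugo's actual kind list):

    package main

    import (
        "fmt"
        "slices"
        "strings"
    )

    func main() {
        kinds := []string{"home", "page", "section", "term"}
        isSection := func(k string) bool { return strings.HasPrefix(k, "sec") }
        fmt.Println(slices.ContainsFunc(kinds, isSection)) // true
    }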

Some files were not shown because too many files have changed in this diff.