mirror of https://github.com/gohugoio/hugo

commit 521911a576 (parent b7ae24b9c2)

    all: Run modernize -fix ./...

Changed directories (truncated listing): cache/dynacache, codegen, common/hashing, common/herrors, common/hreflect, common/hstrings, common/hugo, common/loggers, common/para, common/rungroup, common/tasks, create, htesting/hqt, hugofs, hugolib, hugolib/filesystems, identity, internal, js/esbuild, langs, lazy, markup/goldmark, markup/rst, markup/tableofcontents, media, modules, navigation, output/layouts, parser, releaser, resources
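The hunks below are mechanical and fall into a small set of rewrites enabled by recent Go releases: counted loops become range-over-int (Go 1.22), interface{} becomes any, hand-rolled clamp/copy/contains/delete loops become the min/max builtins (Go 1.21), maps.Copy, slices.Contains/ContainsFunc, slices.Delete, and slices.Clone, and []byte(fmt.Sprintf(...)) becomes fmt.Appendf. As an illustration only (not part of the commit; all names are placeholders), this runnable sketch pairs each modernized idiom with the code it replaces:

// Illustrative sketch of the idioms modernize rewrites to in this commit.
// Requires Go 1.22+ for range-over-int.
package main

import (
	"fmt"
	"maps"
	"slices"
)

func main() {
	// for i := 0; i < 3; i++ { ... }  ->  range over an int
	for i := range 3 {
		fmt.Println("iteration", i)
	}

	// if a < b { a = b }  ->  max builtin (min is symmetric)
	low := max(7-3, 0)
	high := min(7+2, 8)
	fmt.Println(low, high)

	// manual "contains" loop  ->  slices.Contains
	fmt.Println(slices.Contains([]string{"a", "b"}, "b"))

	// s = append(s[:i], s[i+1:]...)  ->  slices.Delete(s, i, i+1)
	s := []int{1, 2, 3}
	s = slices.Delete(s, 1, 2)
	fmt.Println(s) // [1 3]

	// append(T(nil), s...)  ->  slices.Clone
	fmt.Println(slices.Clone(s))

	// for k, v := range src { dst[k] = v }  ->  maps.Copy
	dst := map[string]int{}
	maps.Copy(dst, map[string]int{"k": 1})
	fmt.Println(dst)

	// []byte(fmt.Sprintf(f, args...))  ->  fmt.Appendf(nil, f, args...)
	b := fmt.Appendf(nil, "content:%s", "x")
	fmt.Println(string(b))

	// interface{}  ->  any (type alias; identical semantics)
	var v any = "hello"
	fmt.Println(v)
}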
@@ -191,16 +191,16 @@ func TestPanicInCreate(t *testing.T) {
 				return err
 			}
 
-	for i := 0; i < 3; i++ {
-		for j := 0; j < 3; j++ {
+	for i := range 3 {
+		for range 3 {
 			c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i))
 			c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i))
 		}
 	}
 
 	// Test the same keys again without the panic.
-	for i := 0; i < 3; i++ {
-		for j := 0; j < 3; j++ {
+	for i := range 3 {
+		for range 3 {
 			v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) {
 				return testItem{
 					name: key,
@@ -59,7 +59,7 @@ dir = ":resourceDir/_gen"
 	caches, err := filecache.NewCaches(p)
 	c.Assert(err, qt.IsNil)
 	cache := caches[name]
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		cache.GetOrCreateBytes(id, func() ([]byte, error) {
 			return []byte("abc"), nil
@@ -74,7 +74,7 @@ dir = ":resourceDir/_gen"
 	c.Assert(err, qt.IsNil)
 	c.Assert(count, qt.Equals, 5, msg)
 
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		v := cache.GetString(id)
 		if i < 5 {
@@ -97,7 +97,7 @@ dir = ":resourceDir/_gen"
 	c.Assert(count, qt.Equals, 4)
 
 	// Now only the i5 should be left.
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		id := fmt.Sprintf("i%d", i)
 		v := cache.GetString(id)
 		if i != 5 {
@@ -105,7 +105,7 @@ dir = ":cacheDir/c"
 	}
 
 	for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
-		for i := 0; i < 2; i++ {
+		for range 2 {
 			info, r, err := ca.GetOrCreate("a", rf("abc"))
 			c.Assert(err, qt.IsNil)
 			c.Assert(r, qt.Not(qt.IsNil))
@@ -193,11 +193,11 @@ dir = "/cache/c"
 
 	var wg sync.WaitGroup
 
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		wg.Add(1)
 		go func(i int) {
 			defer wg.Done()
-			for j := 0; j < 20; j++ {
+			for range 20 {
 				ca := caches.Get(cacheName)
 				c.Assert(ca, qt.Not(qt.IsNil))
 				filename, data := filenameData(i)
@@ -26,6 +26,7 @@ import (
 	"path/filepath"
 	"reflect"
 	"regexp"
+	"slices"
 	"sort"
 	"strings"
 	"sync"
@@ -102,7 +103,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 	}
 
 	for _, t := range include {
-		for i := 0; i < t.NumMethod(); i++ {
+		for i := range t.NumMethod() {
 
 			m := t.Method(i)
 			if excludes[m.Name] || seen[m.Name] {
@@ -122,7 +123,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 
 			method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
 
-			for i := 0; i < numIn; i++ {
+			for i := range numIn {
 				in := m.Type.In(i)
 
 				name, pkg := nameAndPackage(in)
@@ -137,7 +138,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
 			numOut := m.Type.NumOut()
 
 			if numOut > 0 {
-				for i := 0; i < numOut; i++ {
+				for i := range numOut {
 					out := m.Type.Out(i)
 					name, pkg := nameAndPackage(out)
 
@@ -304,7 +305,7 @@ func (m Method) inOutStr() string {
 	}
 
 	args := make([]string, len(m.In))
-	for i := 0; i < len(args); i++ {
+	for i := range args {
 		args[i] = fmt.Sprintf("arg%d", i)
 	}
 	return "(" + strings.Join(args, ", ") + ")"
@@ -316,7 +317,7 @@ func (m Method) inStr() string {
 	}
 
 	args := make([]string, len(m.In))
-	for i := 0; i < len(args); i++ {
+	for i := range args {
 		args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
 	}
 	return "(" + strings.Join(args, ", ") + ")"
@@ -339,7 +340,7 @@ func (m Method) outStrNamed() string {
 	}
 
 	outs := make([]string, len(m.Out))
-	for i := 0; i < len(outs); i++ {
+	for i := range outs {
 		outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i])
 	}
 
@@ -435,7 +436,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
 		// Exclude self
 		for i, pkgImp := range pkgImports {
 			if pkgImp == pkgPath {
-				pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
+				pkgImports = slices.Delete(pkgImports, i, i+1)
 			}
 		}
 	}
@@ -101,8 +101,8 @@ type configKey struct {
 
 // This is the root command.
 type rootCommand struct {
-	Printf  func(format string, v ...interface{})
-	Println func(a ...interface{})
+	Printf  func(format string, v ...any)
+	Println func(a ...any)
 	StdOut  io.Writer
 	StdErr  io.Writer
 
@@ -431,12 +431,12 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
 	// Used by mkcert (server).
 	log.SetOutput(r.StdOut)
 
-	r.Printf = func(format string, v ...interface{}) {
+	r.Printf = func(format string, v ...any) {
 		if !r.quiet {
 			fmt.Fprintf(r.StdOut, format, v...)
 		}
 	}
-	r.Println = func(a ...interface{}) {
+	r.Println = func(a ...any) {
 		if !r.quiet {
 			fmt.Fprintln(r.StdOut, a...)
 		}
@@ -90,7 +90,7 @@ func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, arg
 		os.Stdout.Write(buf.Bytes())
 	default:
 		// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
-		var m map[string]interface{}
+		var m map[string]any
 		if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
 			return err
 		}
@@ -222,7 +222,7 @@ url: %s
 	}
 
 	// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
-	var m map[string]interface{}
+	var m map[string]any
 	if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
 		return err
 	}
@@ -65,6 +65,7 @@ import (
 	"github.com/spf13/fsync"
 	"golang.org/x/sync/errgroup"
 	"golang.org/x/sync/semaphore"
+	"maps"
 )
 
 var (
@@ -195,9 +196,7 @@ func (f *fileChangeDetector) PrepareNew() {
 	}
 
 	f.prev = make(map[string]uint64)
-	for k, v := range f.current {
-		f.prev[k] = v
-	}
+	maps.Copy(f.prev, f.current)
 	f.current = make(map[string]uint64)
 }
 
@@ -759,7 +758,7 @@ func (c *serverCommand) createServerPorts(cd *simplecobra.Commandeer) error {
 		c.serverPorts = make([]serverPortListener, len(conf.configs.Languages))
 	}
 	currentServerPort := c.serverPort
-	for i := 0; i < len(c.serverPorts); i++ {
+	for i := range c.serverPorts {
 		l, err := net.Listen("tcp", net.JoinHostPort(c.serverInterface, strconv.Itoa(currentServerPort)))
 		if err == nil {
 			c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort}
@@ -117,7 +117,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
 			tos = append(tos, nil)
 			continue
 		}
-		for i := 0; i < slice.Len(); i++ {
+		for i := range slice.Len() {
 			tos = append(tos, slice.Index(i).Interface())
 		}
 	}
@@ -128,7 +128,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
 func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) {
 	var tos []any
 
-	for i := 0; i < tov.Len(); i++ {
+	for i := range tov.Len() {
 		tos = append(tos, tov.Index(i).Interface())
 	}
 
@@ -13,6 +13,8 @@
 
 package collections
 
+import "slices"
+
 import "sync"
 
 // Stack is a simple LIFO stack that is safe for concurrent use.
@@ -73,7 +75,7 @@ func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T {
	for i := len(s.items) - 1; i >= 0; i-- {
 		if predicate(s.items[i]) {
 			items = append(items, s.items[i])
-			s.items = append(s.items[:i], s.items[i+1:]...)
+			s.items = slices.Delete(s.items, i, i+1)
 		}
 	}
 	return items
@@ -37,12 +37,12 @@ func TestXxHashFromReaderPara(t *testing.T) {
 	c := qt.New(t)
 
 	var wg sync.WaitGroup
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		i := i
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
-			for j := 0; j < 100; j++ {
+			for j := range 100 {
 				s := strings.Repeat("Hello ", i+j+1*42)
 				r := strings.NewReader(s)
 				got, size, err := XXHashFromReader(r)
@@ -144,8 +144,8 @@ func BenchmarkHashString(b *testing.B) {
 }
 
 func BenchmarkHashMap(b *testing.B) {
-	m := map[string]interface{}{}
-	for i := 0; i < 1000; i++ {
+	m := map[string]any{}
+	for i := range 1000 {
 		m[fmt.Sprintf("key%d", i)] = i
 	}
 
@@ -152,10 +152,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
 	}
 
 	if ectx.Position.LineNumber > 0 {
-		low := ectx.Position.LineNumber - 3
-		if low < 0 {
-			low = 0
-		}
+		low := max(ectx.Position.LineNumber-3, 0)
 
 		if ectx.Position.LineNumber > 2 {
 			ectx.LinesPos = 2
@@ -163,10 +160,7 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
 			ectx.LinesPos = ectx.Position.LineNumber - 1
 		}
 
-		high := ectx.Position.LineNumber + 2
-		if high > len(lines) {
-			high = len(lines)
-		}
+		high := min(ectx.Position.LineNumber+2, len(lines))
 
 		ectx.Lines = lines[low:high]
 
@@ -245,7 +245,7 @@ func ToSliceAny(v any) ([]any, bool) {
 	vvv := reflect.ValueOf(v)
 	if vvv.Kind() == reflect.Slice {
 		out := make([]any, vvv.Len())
-		for i := 0; i < vvv.Len(); i++ {
+		for i := range vvv.Len() {
 			out[i] = vvv.Index(i).Interface()
 		}
 		return out, true
@@ -20,6 +20,7 @@ import (
 	"sync"
 
 	"github.com/gohugoio/hugo/compare"
+	"slices"
 )
 
 var _ compare.Eqer = StringEqualFold("")
@@ -50,12 +51,7 @@ func (s StringEqualFold) Eq(s2 any) bool {
 
 // EqualAny returns whether a string is equal to any of the given strings.
 func EqualAny(a string, b ...string) bool {
-	for _, s := range b {
-		if a == s {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(b, a)
 }
 
 // regexpCache represents a cache of regexp objects protected by a mutex.
@@ -103,12 +99,7 @@ func GetOrCompileRegexp(pattern string) (re *regexp.Regexp, err error) {
 // InSlice checks if a string is an element of a slice of strings
 // and returns a boolean value.
 func InSlice(arr []string, el string) bool {
-	for _, v := range arr {
-		if v == el {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(arr, el)
 }
 
 // InSlicEqualFold checks if a string is an element of a slice of strings
@@ -46,7 +46,7 @@ func TestHasBytesWriter(t *testing.T) {
 		return strings.Repeat("ab cfo", r.Intn(33))
 	}
 
-	for i := 0; i < 22; i++ {
+	for range 22 {
 		h, w := neww()
 		fmt.Fprint(w, rndStr()+"abc __foobar"+rndStr())
 		c.Assert(h.Patterns[0].Match, qt.Equals, true)
@@ -416,10 +416,7 @@ func Deprecate(item, alternative string, version string) {
 
 // DeprecateLevelMin informs about a deprecation starting at the given version, but with a minimum log level.
 func DeprecateLevelMin(item, alternative string, version string, minLevel logg.Level) {
-	level := deprecationLogLevelFromVersion(version)
-	if level < minLevel {
-		level = minLevel
-	}
+	level := max(deprecationLogLevelFromVersion(version), minLevel)
 	DeprecateLevel(item, alternative, version, level)
 }
 
@@ -37,7 +37,7 @@ func TestLogDistinct(t *testing.T) {
 
 	l := loggers.New(opts)
 
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		l.Errorln("error 1")
 		l.Errorln("error 2")
 		l.Warnln("warn 1")
@@ -137,7 +137,7 @@ func TestReset(t *testing.T) {
 
 	l := loggers.New(opts)
 
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		l.Errorln("error 1")
 		l.Errorln("error 2")
 		l.Errorln("error 1")
@@ -15,6 +15,7 @@ package maps
 
 import (
 	"github.com/gohugoio/hugo/common/hashing"
+	"slices"
 )
 
 // Ordered is a map that can be iterated in the order of insertion.
@@ -64,7 +65,7 @@ func (m *Ordered[K, T]) Delete(key K) {
 	delete(m.values, key)
 	for i, k := range m.keys {
 		if k == key {
-			m.keys = append(m.keys[:i], m.keys[i+1:]...)
+			m.keys = slices.Delete(m.keys, i, i+1)
 			break
 		}
 	}
@@ -140,7 +140,7 @@ func TestScratchInParallel(t *testing.T) {
	for i := 1; i <= 10; i++ {
 		wg.Add(1)
 		go func(j int) {
-			for k := 0; k < 10; k++ {
+			for k := range 10 {
 				newVal := int64(k + j)
 
 				_, err := scratch.Add(key, newVal)
@@ -42,7 +42,7 @@ func TestPara(t *testing.T) {
	c.Run("Order", func(c *qt.C) {
 		n := 500
 		ints := make([]int, n)
-		for i := 0; i < n; i++ {
+		for i := range n {
 			ints[i] = i
 		}
 
@@ -51,7 +51,7 @@ func TestPara(t *testing.T) {
 
 		var result []int
 		var mu sync.Mutex
-		for i := 0; i < n; i++ {
+		for i := range n {
 			i := i
 			r.Run(func() error {
 				mu.Lock()
@@ -78,7 +78,7 @@ func TestPara(t *testing.T) {
 
 		var counter int64
 
-		for i := 0; i < n; i++ {
+		for range n {
 			r.Run(func() error {
 				atomic.AddInt64(&counter, 1)
 				time.Sleep(1 * time.Millisecond)
@@ -51,7 +51,7 @@ func Run[T any](ctx context.Context, cfg Config[T]) Group[T] {
 	// Buffered for performance.
 	ch := make(chan T, cfg.NumWorkers)
 
-	for i := 0; i < cfg.NumWorkers; i++ {
+	for range cfg.NumWorkers {
 		g.Go(func() error {
 			for {
 				select {
@@ -103,10 +103,7 @@ func (r *RunEvery) Add(name string, f Func) {
 		f.IntervalHigh = 20 * time.Second
 	}
 
-	start := f.IntervalHigh / 3
-	if start < f.IntervalLow {
-		start = f.IntervalLow
-	}
+	start := max(f.IntervalHigh/3, f.IntervalLow)
 	f.interval = start
 	f.last = time.Now()
 
@@ -69,7 +69,7 @@ func ToStringSlicePreserveStringE(v any) ([]string, error) {
	switch vv.Kind() {
 	case reflect.Slice, reflect.Array:
 		result = make([]string, vv.Len())
-		for i := 0; i < vv.Len(); i++ {
+		for i := range vv.Len() {
 			s, err := cast.ToStringE(vv.Index(i).Interface())
 			if err != nil {
 				return nil, err
@@ -15,6 +15,8 @@
 package types
 
 import (
+	"slices"
 	"sync"
 )
 
@@ -45,7 +46,7 @@ func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] {
	if len(q.set) == q.size {
 		// Full
 		delete(q.set, q.vals[0])
-		q.vals = append(q.vals[:0], q.vals[1:]...)
+		q.vals = slices.Delete(q.vals, 0, 1)
 	}
 	q.set[v] = true
 	q.vals = append(q.vals, v)
@@ -55,7 +55,7 @@ func TestEvictingStringQueueConcurrent(t *testing.T) {
 
 	queue := NewEvictingQueue[string](3)
 
-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
@@ -59,7 +59,7 @@ func (k KeyValues) String() string {
 // KeyValues struct.
 func NewKeyValuesStrings(key string, values ...string) KeyValues {
 	iv := make([]any, len(values))
-	for i := 0; i < len(values); i++ {
+	for i := range values {
 		iv[i] = values[i]
 	}
 	return KeyValues{Key: key, Values: iv}
@@ -82,7 +82,7 @@ func init() {
 	}
 	configLanguageKeys = make(map[string]bool)
 	addKeys := func(v reflect.Value) {
-		for i := 0; i < v.NumField(); i++ {
+		for i := range v.NumField() {
 			name := strings.ToLower(v.Type().Field(i).Name)
 			if skip[name] {
 				continue
@@ -305,7 +305,7 @@ func (l configLoader) applyOsEnvOverrides(environ []string) error {
 			_, ok := allDecoderSetups[key]
 			if ok {
 				// A map.
-				if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]interface{}{}); err == nil {
+				if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]any{}); err == nil {
 					val = v
 				}
 			}
@@ -28,6 +28,7 @@ import (
 	"github.com/gohugoio/hugo/common/herrors"
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	"slices"
 )
 
 type BaseConfig struct {
@@ -128,7 +129,7 @@ func (w BuildStats) Enabled() bool {
 }
 
 func (b BuildConfig) clone() BuildConfig {
-	b.CacheBusters = append([]CacheBuster{}, b.CacheBusters...)
+	b.CacheBusters = slices.Clone(b.CacheBusters)
 	return b
 }
 
@@ -166,7 +166,7 @@ func TestBuildConfigCacheBusters(t *testing.T) {
 func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) {
 	c := qt.New(t)
 	cfg := New()
-	cfg.Set("build", map[string]interface{}{
+	cfg.Set("build", map[string]any{
 		"cacheBusters": []map[string]string{
 			{
 				"source": "assets/watching/hugo_stats\\.json",
@@ -345,7 +345,7 @@ func (c *defaultConfigProvider) getNestedKeyAndMap(key string, create bool) (str
 		c.keyCache.Store(key, parts)
 	}
 	current := c.root
-	for i := 0; i < len(parts)-1; i++ {
+	for i := range len(parts) - 1 {
 		next, found := current[parts[i]]
 		if !found {
 			if create {
@@ -332,7 +332,7 @@ func TestDefaultConfigProvider(t *testing.T) {
 			return nil
 		}
 
-		for i := 0; i < 20; i++ {
+		for i := range 20 {
 			i := i
 			r.Run(func() error {
 				const v = 42
@@ -29,7 +29,7 @@ func TestNamespace(t *testing.T) {
 	// ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig)
 
 	ns, err := DecodeNamespace[[]*tstNsExt](
-		map[string]interface{}{"foo": "bar"},
+		map[string]any{"foo": "bar"},
 		func(v any) (*tstNsExt, any, error) {
 			t := &tstNsExt{}
 			m, err := maps.ToStringMapE(v)
@@ -42,7 +42,7 @@ func TestNamespace(t *testing.T) {
 
 	c.Assert(err, qt.IsNil)
 	c.Assert(ns, qt.Not(qt.IsNil))
-	c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]interface{}{"foo": "bar"})
+	c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]any{"foo": "bar"})
 	c.Assert(ns.SourceHash, qt.Equals, "1420f6c7782f7459")
 	c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"})
 	c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil))
@@ -73,7 +73,7 @@ func NewWhitelist(patterns ...string) (Whitelist, error) {
 
 	var patternsr []*regexp.Regexp
 
-	for i := 0; i < len(patterns); i++ {
+	for i := range patterns {
 		p := strings.TrimSpace(patterns[i])
 		if p == "" {
 			continue
@@ -129,7 +129,7 @@ site RegularPages: {{ len site.RegularPages }}
 
 `
 
-	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), []byte(fmt.Sprintf(contentFile, "index.md")), 0o755), qt.IsNil)
+	c.Assert(afero.WriteFile(mm, filepath.Join(archetypeDir, "index.md"), fmt.Appendf(nil, contentFile, "index.md"), 0o755), qt.IsNil)
 	c.Assert(afero.WriteFile(mm, filepath.Join(defaultArchetypeDir, "index.md"), []byte("default archetype index.md"), 0o755), qt.IsNil)
 
 	c.Assert(initFs(mm), qt.IsNil)
@@ -109,10 +109,7 @@ func ExtractTOC(content []byte) (newcontent []byte, toc []byte) {
 
 	startOfTOC := bytes.Index(content, first)
 
-	peekEnd := len(content)
-	if peekEnd > 70+startOfTOC {
-		peekEnd = 70 + startOfTOC
-	}
+	peekEnd := min(len(content), 70+startOfTOC)
 
 	if startOfTOC < 0 {
 		return stripEmptyNav(content), toc
@@ -43,11 +43,7 @@ func Emojify(source []byte) []byte {
 
 		j := start + k
 
-		upper := j + emojiMaxSize
-
-		if upper > len(source) {
-			upper = len(source)
-		}
+		upper := min(j+emojiMaxSize, len(source))
 
 		endEmoji := bytes.Index(source[j+1:upper], emojiDelim)
 		nextWordDelim := bytes.Index(source[j:upper], emojiWordDelim)
@@ -63,7 +63,7 @@ func UniqueStrings(s []string) []string {
	unique := make([]string, 0, len(s))
 	for i, val := range s {
 		var seen bool
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break
@@ -83,7 +83,7 @@ func UniqueStringsReuse(s []string) []string {
	for i, val := range s {
 		var seen bool
 
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break
@@ -89,7 +89,7 @@ func ProcessingStatsTable(w io.Writer, stats ...*ProcessingStats) {
 
 	var data [][]string
 
-	for i := 0; i < len(stats); i++ {
+	for i := range stats {
 		stat := stats[i]
 		names[i+1] = stat.Name
 
@@ -101,10 +101,10 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
	v := config.New()
 	if multilingual {
 		v.Set("languages", map[string]any{
-			"fr": map[string]interface{}{
+			"fr": map[string]any{
 				"weight": 20,
 			},
-			"en": map[string]interface{}{
+			"en": map[string]any{
 				"weight": 10,
 			},
 		})
@@ -112,7 +112,7 @@ func doTestAbsURL(t *testing.T, defaultInSubDir, addLanguage, multilingual bool,
	} else {
 		v.Set("defaultContentLanguage", lang)
 		v.Set("languages", map[string]any{
-			lang: map[string]interface{}{
+			lang: map[string]any{
 				"weight": 10,
 			},
 		})
@@ -167,10 +167,10 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool,
	v := config.New()
 	if multilingual {
 		v.Set("languages", map[string]any{
-			"fr": map[string]interface{}{
+			"fr": map[string]any{
 				"weight": 20,
 			},
-			"en": map[string]interface{}{
+			"en": map[string]any{
 				"weight": 10,
 			},
 		})
@@ -178,7 +178,7 @@ func doTestRelURL(t testing.TB, defaultInSubDir, addLanguage, multilingual bool,
	} else {
 		v.Set("defaultContentLanguage", lang)
 		v.Set("languages", map[string]any{
-			lang: map[string]interface{}{
+			lang: map[string]any{
 				"weight": 10,
 			},
 		})
@@ -151,7 +151,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) {
 			structTypes(v.Elem(), m)
 		}
 	case reflect.Slice, reflect.Array:
-		for i := 0; i < v.Len(); i++ {
+		for i := range v.Len() {
 			structTypes(v.Index(i), m)
 		}
 	case reflect.Map:
@@ -160,7 +160,7 @@ func structTypes(v reflect.Value, m map[reflect.Type]struct{}) {
 		}
 	case reflect.Struct:
 		m[v.Type()] = struct{}{}
-		for i := 0; i < v.NumField(); i++ {
+		for i := range v.NumField() {
 			structTypes(v.Field(i), m)
 		}
 	}
@@ -93,7 +93,7 @@ func (m *FileMeta) Merge(from *FileMeta) {
	dstv := reflect.Indirect(reflect.ValueOf(m))
 	srcv := reflect.Indirect(reflect.ValueOf(from))
 
-	for i := 0; i < dstv.NumField(); i++ {
+	for i := range dstv.NumField() {
 		v := dstv.Field(i)
 		if !v.CanSet() {
 			continue
@@ -214,7 +214,7 @@ func WalkFilesystems(fs afero.Fs, fn WalkFn) bool {
 			}
 		}
 	} else if cfs, ok := fs.(overlayfs.FilesystemIterator); ok {
-		for i := 0; i < cfs.NumFilesystems(); i++ {
+		for i := range cfs.NumFilesystems() {
 			if WalkFilesystems(cfs.Filesystem(i), fn) {
 				return true
 			}
@@ -166,7 +166,7 @@ func FilterGlobParts(a []string) []string {
 
 // HasGlobChar returns whether s contains any glob wildcards.
 func HasGlobChar(s string) bool {
-	for i := 0; i < len(s); i++ {
+	for i := range len(s) {
 		if syntax.Special(s[i]) {
 			return true
 		}
@@ -91,7 +91,7 @@ func TestWalkRootMappingFs(t *testing.T) {
	p := para.New(4)
 	r, _ := p.Start(context.Background())
 
-	for i := 0; i < 8; i++ {
+	for range 8 {
 		r.Run(func() error {
 			_, err := collectPaths(bfs, "")
 			if err != nil {
@@ -153,7 +153,7 @@ func BenchmarkWalk(b *testing.B) {
	fs := NewBaseFileDecorator(afero.NewMemMapFs())
 
 	writeFiles := func(dir string, numfiles int) {
-		for i := 0; i < numfiles; i++ {
+		for i := range numfiles {
 			filename := filepath.Join(dir, fmt.Sprintf("file%d.txt", i))
 			c.Assert(afero.WriteFile(fs, filename, []byte("content"), 0o777), qt.IsNil)
 		}
@@ -871,7 +871,7 @@ Background: {{ .Params.background }}|
{{ .Title }}|
 `
 
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		b := Test(t, files)
 		b.AssertFileContent("public/p1/index.html", "Background: yosemite.jpg")
 	}
@@ -793,7 +793,7 @@ Single.
	files := strings.ReplaceAll(filesTemplate, "WEIGHT_EN", "2")
 	files = strings.ReplaceAll(files, "WEIGHT_SV", "1")
 
-	for i := 0; i < 20; i++ {
+	for range 20 {
 		cfg := config.New()
 		b, err := NewIntegrationTestBuilder(
 			IntegrationTestConfig{
@@ -323,7 +323,7 @@ R: {{ with $r }}{{ .Content }}{{ end }}|Len: {{ len $bundle.Resources }}|$
 
 `
 
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := Test(t, files)
 		b.AssertFileContent("public/index.html", "R: Data 1.txt|", "Len: 1|")
 	}
@@ -435,14 +435,14 @@ func TestContentTreeReverseIndex(t *testing.T) {
 
	pageReverseIndex := newContentTreeTreverseIndex(
 		func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) {
-			for i := 0; i < 10; i++ {
+			for i := range 10 {
 				key := fmt.Sprint(i)
 				set(key, &testContentNode{key: key})
 			}
 		},
 	)
 
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		key := fmt.Sprint(i)
 		v := pageReverseIndex.Get(key)
 		c.Assert(v, qt.Not(qt.IsNil))
@@ -456,17 +456,17 @@ func TestContentTreeReverseIndexPara(t *testing.T) {
 
 	var wg sync.WaitGroup
 
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		pageReverseIndex := newContentTreeTreverseIndex(
 			func(get func(key any) (contentNodeI, bool), set func(key any, val contentNodeI)) {
-				for i := 0; i < 10; i++ {
+				for i := range 10 {
 					key := fmt.Sprint(i)
 					set(key, &testContentNode{key: key})
 				}
 			},
 		)
 
-		for j := 0; j < 10; j++ {
+		for j := range 10 {
 			wg.Add(1)
 			go func(i int) {
 				defer wg.Done()
@@ -193,7 +193,7 @@ func TestTreePara(t *testing.T) {
 		},
 	)
 
-	for i := 0; i < 8; i++ {
+	for i := range 8 {
 		i := i
 		r.Run(func() error {
 			a := &testValue{ID: "/a"}
@@ -289,7 +289,7 @@ func BenchmarkTreeInsert(b *testing.B) {
 			},
 		)
 
-		for i := 0; i < numElements; i++ {
+		for i := range numElements {
 			lang := rand.Intn(2)
 			tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true})
 		}
@@ -323,7 +323,7 @@ func BenchmarkWalk(b *testing.B) {
 			},
 		)
 
-		for i := 0; i < numElements; i++ {
+		for i := range numElements {
 			lang := rand.Intn(2)
 			tree.InsertIntoValuesDimension(fmt.Sprintf("/%d", i), &testValue{ID: fmt.Sprintf("/%d", i), Lang: lang, Weight: i, NoCopy: true})
 		}
@@ -355,8 +355,8 @@ func BenchmarkWalk(b *testing.B) {
 		base := createTree()
 		b.ResetTimer()
 		for i := 0; i < b.N; i++ {
-			for d1 := 0; d1 < 1; d1++ {
-				for d2 := 0; d2 < 2; d2++ {
+			for d1 := range 1 {
+				for d2 := range 2 {
 					tree := base.Shape(d1, d2)
 					w := &doctree.NodeShiftTreeWalker[*testValue]{
 						Tree: tree,
@@ -363,7 +363,7 @@ func (r *NodeShiftTreeWalker[T]) Walk(ctx context.Context) error {
	main := r.Tree
 
 	var err error
-	fnMain := func(s string, v interface{}) bool {
+	fnMain := func(s string, v any) bool {
 		if r.ShouldSkip(s) {
 			return false
 		}
@@ -34,7 +34,7 @@ func NewTreeShiftTree[T comparable](d, length int) *TreeShiftTree[T] {
 		panic("length must be > 0")
 	}
 	trees := make([]*SimpleTree[T], length)
-	for i := 0; i < length; i++ {
+	for i := range length {
 		trees[i] = NewSimpleTree[T]()
 	}
 	return &TreeShiftTree[T]{d: d, trees: trees}
@@ -634,7 +634,7 @@ func (b *sourceFilesystemsBuilder) createMainOverlayFs(p *paths.Paths) (*filesys
 
 	mounts := make([]mountsDescriptor, len(mods))
 
-	for i := 0; i < len(mods); i++ {
+	for i := range mods {
 		mod := mods[i]
 		dir := mod.Dir()
 
@@ -57,14 +57,14 @@ func TestNewBaseFs(t *testing.T) {
 			filenameTheme := filepath.Join(base, fmt.Sprintf("theme-file-%s.txt", theme))
 			filenameOverlap := filepath.Join(base, "f3.txt")
 			afs.Mkdir(base, 0o755)
-			content := []byte(fmt.Sprintf("content:%s:%s", theme, dir))
+			content := fmt.Appendf(nil, "content:%s:%s", theme, dir)
 			afero.WriteFile(afs, filenameTheme, content, 0o755)
 			afero.WriteFile(afs, filenameOverlap, content, 0o755)
 		}
 		// Write some files to the root of the theme
 		base := filepath.Join(workingDir, "themes", theme)
-		afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), []byte(fmt.Sprintf("content:%s", theme)), 0o755)
-		afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), []byte(fmt.Sprintf("content:%s", theme)), 0o755)
+		afero.WriteFile(afs, filepath.Join(base, fmt.Sprintf("theme-root-%s.txt", theme)), fmt.Appendf(nil, "content:%s", theme), 0o755)
+		afero.WriteFile(afs, filepath.Join(base, "file-theme-root.txt"), fmt.Appendf(nil, "content:%s", theme), 0o755)
 	}
 
 	afero.WriteFile(afs, filepath.Join(workingDir, "file-root.txt"), []byte("content-project"), 0o755)
@@ -683,8 +683,8 @@ func setConfigAndWriteSomeFilesTo(fs afero.Fs, v config.Provider, key, val strin
	workingDir := v.GetString("workingDir")
 	v.Set(key, val)
 	fs.Mkdir(val, 0o755)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		filename := filepath.Join(workingDir, val, fmt.Sprintf("f%d.txt", i+1))
-		afero.WriteFile(fs, filename, []byte(fmt.Sprintf("content:%s:%d", key, i+1)), 0o755)
+		afero.WriteFile(fs, filename, fmt.Appendf(nil, "content:%s:%d", key, i+1), 0o755)
 	}
 }
@@ -311,7 +311,7 @@ func (h *HugoSites) NumLogErrors() int {
 
 func (h *HugoSites) PrintProcessingStats(w io.Writer) {
 	stats := make([]*helpers.ProcessingStats, len(h.Sites))
-	for i := 0; i < len(h.Sites); i++ {
+	for i := range h.Sites {
 		stats[i] = h.Sites[i].PathSpec.ProcessingStats
 	}
 	helpers.ProcessingStatsTable(w, stats...)
@@ -707,7 +707,7 @@ func (p *pageState) shiftToOutputFormat(isRenderingSite bool, idx int) error {
	cp := p.pageOutput.pco
 	if cp == nil && p.reusePageOutputContent() {
 		// Look for content to reuse.
-		for i := 0; i < len(p.pageOutputs); i++ {
+		for i := range p.pageOutputs {
 			if i == idx {
 				continue
 			}
@@ -45,6 +45,7 @@ import (
 	"github.com/gohugoio/hugo/tpl"
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	maps0 "maps"
 )
 
 const (
@@ -696,9 +697,7 @@ func (c *cachedContentScope) contentToC(ctx context.Context) (contentTableOfCont
 			cp.otherOutputs.Set(cp2.po.p.pid, cp2)
 
 			// Merge content placeholders
-			for k, v := range ct2.contentPlaceholders {
-				ct.contentPlaceholders[k] = v
-			}
+			maps0.Copy(ct.contentPlaceholders, ct2.contentPlaceholders)
 
 			if p.s.conf.Internal.Watch {
 				for _, s := range cp2.po.p.m.content.shortcodeState.shortcodes {
@@ -690,7 +690,7 @@ bundle min min key: {{ $jsonMinMin.Key }}
 
 `)
 
-	for i := 0; i < 3; i++ {
+	for range 3 {
 
 		b.Build(BuildCfg{})
 
@@ -47,8 +47,8 @@ func BenchmarkGetPage(b *testing.B) {
 		b.Fatal(err)
 	}
 
-	for i := 0; i < 10; i++ {
-		for j := 0; j < 100; j++ {
+	for i := range 10 {
+		for j := range 100 {
 			writeSource(b, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), "CONTENT")
 		}
 	}
@@ -91,8 +91,8 @@ func createGetPageRegularBenchmarkSite(t testing.TB) *Site {
 		return fmt.Sprintf(pageCollectionsPageTemplate, title)
 	}
 
-	for i := 0; i < 10; i++ {
-		for j := 0; j < 100; j++ {
+	for i := range 10 {
+		for j := range 100 {
 			content := pc(fmt.Sprintf("Title%d_%d", i, j))
 			writeSource(c, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
 		}
@@ -105,7 +105,7 @@ func TestBenchmarkGetPageRegular(t *testing.T) {
	c := qt.New(t)
 	s := createGetPageRegularBenchmarkSite(t)
 
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		pp := path.Join("/", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", i))
 		page, _ := s.getPage(nil, pp)
 		c.Assert(page, qt.Not(qt.IsNil), qt.Commentf(pp))
@@ -192,8 +192,8 @@ func TestGetPage(t *testing.T) {
 		return fmt.Sprintf(pageCollectionsPageTemplate, title)
 	}
 
-	for i := 0; i < 10; i++ {
-		for j := 0; j < 10; j++ {
+	for i := range 10 {
+		for j := range 10 {
 			content := pc(fmt.Sprintf("Title%d_%d", i, j))
 			writeSource(t, fs, filepath.Join("content", fmt.Sprintf("sect%d", i), fmt.Sprintf("page%d.md", j)), content)
 		}
@@ -42,7 +42,7 @@ func TestMergeLanguages(t *testing.T) {
	c.Assert(len(frSite.RegularPages()), qt.Equals, 6)
 	c.Assert(len(nnSite.RegularPages()), qt.Equals, 12)
 
-	for i := 0; i < 2; i++ {
+	for range 2 {
 		mergedNN := nnSite.RegularPages().MergeByLanguage(enSite.RegularPages())
 		c.Assert(len(mergedNN), qt.Equals, 31)
 		for i := 1; i <= 31; i++ {
@@ -163,7 +163,7 @@ date: "2018-02-28"
	// Add a bundles
 	j := 100
 	contentPairs = append(contentPairs, []string{"bundle/index.md", fmt.Sprintf(contentTemplate, j, j)}...)
-	for i := 0; i < 6; i++ {
+	for i := range 6 {
 		contentPairs = append(contentPairs, []string{fmt.Sprintf("bundle/pb%d.md", i), fmt.Sprintf(contentTemplate, i+j, i+j)}...)
 	}
 	contentPairs = append(contentPairs, []string{"bundle/index.nn.md", fmt.Sprintf(contentTemplate, j, j)}...)
@@ -40,7 +40,7 @@ contentDir = "content/nn"
`
	b := newTestSitesBuilder(t).WithConfigFile("toml", configFile)
 	var content []string
-	for i := 0; i < 9; i++ {
+	for i := range 9 {
 		for _, contentDir := range []string{"content/en", "content/nn"} {
 			content = append(content, fmt.Sprintf(contentDir+"/blog/page%d.md", i), fmt.Sprintf(`---
title: Page %d
@@ -118,7 +118,7 @@ cascade:
  - JSON
---`)
 
-	for i := 0; i < 22; i++ {
+	for i := range 22 {
 		b.WithContent(fmt.Sprintf("p%d.md", i+1), fmt.Sprintf(`---
title: "Page"
weight: %d
@@ -124,7 +124,7 @@ func TestRebuildEditTextFileInLeafBundle(t *testing.T) {
 
 func TestRebuildEditTextFileInShortcode(t *testing.T) {
 	t.Parallel()
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := TestRunning(t, rebuildFilesSimple)
 		b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html",
 			"Text: Assets My Shortcode Text.")
@@ -138,7 +138,7 @@ func TestRebuildEditTextFileInShortcode(t *testing.T) {
 
 func TestRebuildEditTextFileInHook(t *testing.T) {
 	t.Parallel()
-	for i := 0; i < 3; i++ {
+	for range 3 {
 		b := TestRunning(t, rebuildFilesSimple)
 		b.AssertFileContent("public/mythirdsection/mythirdsectionpage/index.html",
 			"Text: Assets My Other Text.")
@@ -1545,7 +1545,7 @@ title: "P%d"
P%d Content.
 `
 
-	for i := 0; i < count; i++ {
+	for i := range count {
 		files += fmt.Sprintf("-- content/mysect/p%d/index.md --\n%s", i, fmt.Sprintf(contentTemplate, i, i))
 	}
 
@@ -99,7 +99,7 @@ FAILED REMOTE ERROR DETAILS CONTENT: {{ with $failedImg }}{{ with .Err }}{{ with
 
 	b.Running()
 
-	for i := 0; i < 2; i++ {
+	for i := range 2 {
 		b.Logf("Test run %d", i)
 		b.Build(BuildCfg{})
 
@@ -200,7 +200,7 @@ func BenchmarkResourceChainPostProcess(b *testing.B) {
	for i := 0; i < b.N; i++ {
 		b.StopTimer()
 		s := newTestSitesBuilder(b)
-		for i := 0; i < 300; i++ {
+		for i := range 300 {
 			s.WithContent(fmt.Sprintf("page%d.md", i+1), "---\ntitle: Page\n---")
 		}
 		s.WithTemplates("_default/single.html", `Start.
@@ -865,13 +865,13 @@ Content: {{ .Content }}|
func TestShortcodeStableOutputFormatTemplates(t *testing.T) {
 	t.Parallel()
 
-	for i := 0; i < 5; i++ {
+	for range 5 {
 
 		b := newTestSitesBuilder(t)
 
 		const numPages = 10
 
-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.WithContent(fmt.Sprintf("page%d.md", i), `---
title: "Page"
outputs: ["html", "css", "csv", "json"]
@@ -894,14 +894,14 @@ outputs: ["html", "css", "csv", "json"]
 
 		// helpers.PrintFs(b.Fs.Destination, "public", os.Stdout)
 
-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", i), "Short-HTML")
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.csv", i), "Short-CSV")
 			b.AssertFileContent(fmt.Sprintf("public/page%d/index.json", i), "Short-HTML")
 
 		}
 
-		for i := 0; i < numPages; i++ {
+		for i := range numPages {
 			b.AssertFileContent(fmt.Sprintf("public/page%d/styles.css", i), "Short-HTML")
 		}
 
@@ -330,10 +330,7 @@ func NewHugoSites(cfg deps.DepsCfg) (*HugoSites, error) {
 
 func newHugoSites(cfg deps.DepsCfg, d *deps.Deps, pageTrees *pageTrees, sites []*Site) (*HugoSites, error) {
 	numWorkers := config.GetNumWorkerMultiplier()
-	numWorkersSite := numWorkers
-	if numWorkersSite > len(sites) {
-		numWorkersSite = len(sites)
-	}
+	numWorkersSite := min(numWorkers, len(sites))
 	workersSite := para.New(numWorkersSite)
 
 	h := &HugoSites{
@@ -78,7 +78,7 @@ func (s *Site) renderPages(ctx *siteRenderContext) error {
 
 	wg := &sync.WaitGroup{}
 
-	for i := 0; i < numWorkers; i++ {
+	for range numWorkers {
 		wg.Add(1)
 		go pageRenderer(ctx, s, pages, results, wg)
 	}
@@ -69,15 +69,15 @@ aliases: [/Ali%d]
 		"_default/terms.html", "Terms List|{{ .Title }}|{{ .Content }}",
 	)
 
-	for i := 0; i < 2; i++ {
-		for j := 0; j < 2; j++ {
+	for i := range 2 {
+		for j := range 2 {
 			pageID := i + j + 1
 			b.WithContent(fmt.Sprintf("content/sect/p%d.md", pageID),
 				fmt.Sprintf(pageTemplate, pageID, fmt.Sprintf("- tag%d", j), fmt.Sprintf("- category%d", j), pageID))
 		}
 	}
 
-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		b.WithContent(fmt.Sprintf("assets/image%d.png", i+1), "image")
 	}
 
@@ -372,14 +372,14 @@ func TestMainSections(t *testing.T) {
 
	b := newTestSitesBuilder(c).WithViper(v)
 
-	for i := 0; i < 20; i++ {
+	for i := range 20 {
 		b.WithContent(fmt.Sprintf("page%d.md", i), `---
title: "Page"
---
`)
 	}
 
-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		b.WithContent(fmt.Sprintf("blog/page%d.md", i), `---
title: "Page"
tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
@@ -387,7 +387,7 @@ tags: ["a", "b", "c", "d", "e", "f", "g", "h", "i", "j"]
`)
 	}
 
-	for i := 0; i < 3; i++ {
+	for i := range 3 {
 		b.WithContent(fmt.Sprintf("docs/page%d.md", i), `---
title: "Page"
---
@@ -97,7 +97,7 @@ Do not go gentle into that good night.
	writeSource(t, fs, filepath.Join("content", "sect1", "_index.md"), fmt.Sprintf(st, "/ss1/"))
 	writeSource(t, fs, filepath.Join("content", "sect2", "_index.md"), fmt.Sprintf(st, "/ss2/"))
 
-	for i := 0; i < 5; i++ {
+	for i := range 5 {
 		writeSource(t, fs, filepath.Join("content", "sect1", fmt.Sprintf("p%d.md", i+1)), pt)
 		writeSource(t, fs, filepath.Join("content", "sect2", fmt.Sprintf("p%d.md", i+1)), pt)
 	}
@@ -314,7 +314,7 @@ func TestTaxonomiesNextGenLoops(t *testing.T) {
  </ul>
`)
 
-	for i := 0; i < 10; i++ {
+	for i := range 10 {
 		b.WithContent(fmt.Sprintf("page%d.md", i+1), `
---
Title: "Taxonomy!"
@@ -250,7 +250,7 @@ Content.
Base %d: {{ block "main" . }}FOO{{ end }}
`
 
-	for i := 0; i < numPages; i++ {
+	for i := range numPages {
 		id := i + 1
 		b.WithContent(fmt.Sprintf("page%d.md", id), fmt.Sprintf(pageTemplate, id, id))
 		b.WithTemplates(fmt.Sprintf("_default/layout%d.html", id), fmt.Sprintf(singleTemplate, id))
@@ -258,7 +258,7 @@ Base %d: {{ block "main" . }}FOO{{ end }}
 	}
 
 	b.Build(BuildCfg{})
-	for i := 0; i < numPages; i++ {
+	for i := range numPages {
 		id := i + 1
 		b.AssertFileContent(fmt.Sprintf("public/page%d/index.html", id), fmt.Sprintf(`Base %d: %d`, id, id))
 	}
@@ -27,7 +27,7 @@ func NewFinder(cfg FinderConfig) *Finder {
 }
 
 var searchIDPool = sync.Pool{
-	New: func() interface{} {
+	New: func() any {
 		return &searchID{seen: make(map[Manager]bool)}
 	},
 }
@@ -25,7 +25,7 @@ import (
func BenchmarkIdentityManager(b *testing.B) {
 	createIds := func(num int) []identity.Identity {
 		ids := make([]identity.Identity, num)
-		for i := 0; i < num; i++ {
+		for i := range num {
 			name := fmt.Sprintf("id%d", i)
 			ids[i] = &testIdentity{base: name, name: name}
 		}
@@ -108,10 +108,10 @@ func BenchmarkIsNotDependent(b *testing.B) {
 
	newNestedManager := func(depth, count int) identity.Manager {
 		m1 := identity.NewManager("")
-		for i := 0; i < depth; i++ {
+		for range depth {
 			m2 := identity.NewManager("")
 			m1.AddIdentity(m2)
-			for j := 0; j < count; j++ {
+			for j := range count {
 				id := fmt.Sprintf("id%d", j)
 				m2.AddIdentity(&testIdentity{id, id, "", ""})
 			}
@@ -27,6 +27,7 @@ import (
 	"github.com/gohugoio/hugo/resources"
 	"github.com/gohugoio/hugo/resources/resource"
 	"github.com/spf13/afero"
+	"slices"
 )
 
 const (
@@ -167,15 +168,11 @@ func createBuildPlugins(rs *resources.Spec, assetsResolver *fsResolver, depsMana
 				}
 			}
 
-			for _, ext := range opts.Externals {
-				// ESBuild will do a more thorough check for packages resolved in node_modules,
-				// but we need to make sure that we don't try to resolve these in the /assets folder.
-				if ext == impPath {
-					return api.OnResolveResult{
-						Path:     impPath,
-						External: true,
-					}, nil
-				}
+			// ESBuild will do a more thorough check for packages resolved in node_modules,
+			// but we need to make sure that we don't try to resolve these in the /assets folder.
+			if slices.Contains(opts.Externals, impPath) {
+				return api.OnResolveResult{
+					Path:     impPath,
+					External: true,
+				}, nil
 			}
 
 			if opts.ImportOnResolveFunc != nil {
@@ -384,7 +384,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) {
 	}
 
 	inOuts := make([]*inOut, opts.PoolSize)
-	for i := 0; i < opts.PoolSize; i++ {
+	for i := range opts.PoolSize {
 		var stdin, stdout hugio.ReadWriteCloser
 
 		stdin = hugio.NewPipeReadWriteCloser()
@@ -478,7 +478,7 @@ func newDispatcher[Q, R any](opts Options) (*dispatcherPool[Q, R], error) {
 		close(dp.donec)
 	}()
 
-	for i := 0; i < len(inOuts); i++ {
+	for i := range inOuts {
 		d := &dispatcher[Q, R]{
 			pending: make(map[uint32]*call[Q, R]),
 			inOut:   inOuts[i],
@@ -101,7 +101,7 @@ func TestGreet(t *testing.T) {
 		Infof: t.Logf,
 	}
 
-	for i := 0; i < 2; i++ {
+	for range 2 {
 		func() {
 			d, err := Start[person, greeting](opts)
 			if err != nil {
@@ -123,7 +123,7 @@ func TestGreet(t *testing.T) {
 				},
 			}
 
-			for j := 0; j < 20; j++ {
+			for j := range 20 {
 				inputMessage.Header.ID = uint32(j + 1)
 				g, err := d.Execute(ctx, inputMessage)
 				if err != nil {
@@ -163,7 +163,7 @@ func TestGreetParallel(t *testing.T) {
 
 			ctx := context.Background()
 
-			for j := 0; j < 5; j++ {
+			for j := range 5 {
 				base := i * 100
 				id := uint32(base + j)
 
@@ -217,7 +217,7 @@ func TestKatexParallel(t *testing.T) {
 
 			ctx := context.Background()
 
-			for j := 0; j < 1; j++ {
+			for j := range 1 {
 				base := i * 100
 				id := uint32(base + j)
 
@@ -29,13 +29,13 @@ func TestCollator(t *testing.T) {
 
	coll := &Collator{c: collate.New(language.English, collate.Loose)}
 
-	for i := 0; i < 10; i++ {
+	for range 10 {
 		wg.Add(1)
 		go func() {
 			coll.Lock()
 			defer coll.Unlock()
 			defer wg.Done()
-			for j := 0; j < 10; j++ {
+			for range 10 {
 				k := coll.CompareStrings("abc", "def")
 				c.Assert(k, qt.Equals, -1)
 			}
@@ -48,7 +48,7 @@ func BenchmarkCollator(b *testing.B) {
	s := []string{"foo", "bar", "éntre", "baz", "qux", "quux", "corge", "grault", "garply", "waldo", "fred", "plugh", "xyzzy", "thud"}
 
 	doWork := func(coll *Collator) {
-		for i := 0; i < len(s); i++ {
+		for i := range s {
 			for j := i + 1; j < len(s); j++ {
 				_ = coll.CompareStrings(s[i], s[j])
 			}
@@ -79,7 +79,7 @@ func TestInit(t *testing.T) {
 
 	// Add some concurrency and randomness to verify thread safety and
 	// init order.
-	for i := 0; i < 100; i++ {
+	for i := range 100 {
 		wg.Add(1)
 		go func(i int) {
 			defer wg.Done()
@@ -77,7 +77,7 @@ func (r *htmlRenderer) renderCodeBlock(w util.BufWriter, src []byte, node ast.No
	var buff bytes.Buffer
 
 	l := n.Lines().Len()
-	for i := 0; i < l; i++ {
+	for i := range l {
 		line := n.Lines().At(i)
 		buff.Write(line.Value(src))
 	}
@@ -182,7 +182,7 @@ func (r *hugoContextRenderer) renderHTMLBlock(
	if entering {
 		if r.Unsafe {
 			l := n.Lines().Len()
-			for i := 0; i < l; i++ {
+			for i := range l {
 				line := n.Lines().At(i)
 				linev := line.Value(source)
 				var stripped bool
@@ -226,7 +226,7 @@ func (r *hugoContextRenderer) renderRawHTML(
	n := node.(*ast.RawHTML)
 	l := n.Segments.Len()
 	if r.Unsafe {
-		for i := 0; i < l; i++ {
+		for i := range l {
 			segment := n.Segments.At(i)
 			_, _ = w.Write(segment.Value(source))
 		}
@@ -110,7 +110,7 @@ func (r *htmlRenderer) renderPassthroughBlock(w util.BufWriter, src []byte, node
	case (*passthrough.PassthroughBlock):
 		l := nn.Lines().Len()
 		var buff bytes.Buffer
-		for i := 0; i < l; i++ {
+		for i := range l {
 			line := nn.Lines().At(i)
 			buff.Write(line.Value(src))
 		}
@@ -100,10 +100,7 @@ func (c *rstConverter) getRstContent(src []byte, ctx converter.DocumentContext)
 
	bodyEnd := bytes.Index(result, []byte("\n</body>"))
 	if bodyEnd < 0 || bodyEnd >= len(result) {
-		bodyEnd = len(result) - 1
-		if bodyEnd < 0 {
-			bodyEnd = 0
-		}
+		bodyEnd = max(len(result)-1, 0)
 	}
 
 	return result[bodyStart+7 : bodyEnd], err
@@ -250,7 +250,7 @@ func (b *tocBuilder) writeHeading(level, indent int, h *Heading) {
 }
 
 func (b *tocBuilder) indent(n int) {
-	for i := 0; i < n; i++ {
+	for range n {
 		b.s.WriteString("  ")
 	}
 }
@@ -196,7 +196,7 @@ func TestTocMisc(t *testing.T) {
func BenchmarkToc(b *testing.B) {
 	newTocs := func(n int) []*Fragments {
 		var tocs []*Fragments
-		for i := 0; i < n; i++ {
+		for range n {
 			tocs = append(tocs, newTestToc())
 		}
 		return tocs
@@ -26,6 +26,7 @@ import (
 
 	"github.com/mitchellh/mapstructure"
 	"github.com/spf13/cast"
+	"slices"
 )
 
 // DefaultTypes is the default media types supported by Hugo.
@@ -46,7 +47,7 @@ func init() {
	// Initialize the Builtin types with values from DefaultTypes.
 	v := reflect.ValueOf(&Builtin).Elem()
 
-	for i := 0; i < v.NumField(); i++ {
+	for i := range v.NumField() {
 		f := v.Field(i)
 		fieldName := v.Type().Field(i).Name
 		builtinType := f.Interface().(Type)
@@ -149,12 +150,7 @@ func (t ContentTypes) IsIndexContentFile(filename string) bool {
 
 // IsHTMLSuffix returns whether the given suffix is a HTML media type.
 func (t ContentTypes) IsHTMLSuffix(suffix string) bool {
-	for _, s := range t.HTML.Suffixes() {
-		if s == suffix {
-			return true
-		}
-	}
-	return false
+	return slices.Contains(t.HTML.Suffixes(), suffix)
 }
 
 // Types is a slice of media types.
@@ -380,14 +380,12 @@ func (c *Client) Verify(clean bool) error {
	if err != nil {
 		if clean {
 			m := verifyErrorDirRe.FindAllStringSubmatch(err.Error(), -1)
 			if m != nil {
-				for i := 0; i < len(m); i++ {
-					c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1])
-					if err != nil {
-						return err
-					}
-					fmt.Println("Cleaned", c)
+				for i := range m {
+					c, err := hugofs.MakeReadableAndRemoveAllModulePkgDir(c.fs, m[i][1])
+					if err != nil {
+						return err
+					}
+					fmt.Println("Cleaned", c)
 				}
 				// Try to verify it again.
 				err = c.runVerify()
@@ -25,6 +25,7 @@ import (
 	"github.com/mitchellh/mapstructure"
 
 	"github.com/spf13/cast"
+	"slices"
 )
 
 var smc = newMenuCache()
@@ -267,7 +268,7 @@ func (m Menu) Reverse() Menu {
 // Clone clones the menu entries.
 // This is for internal use only.
 func (m Menu) Clone() Menu {
-	return append(Menu(nil), m...)
+	return slices.Clone(m)
 }
 
 func DecodeConfig(in any) (*config.ConfigNamespace[map[string]MenuConfig, Menus], error) {
@@ -14,6 +14,7 @@
 package navigation
 
 import (
+	"slices"
 	"sync"
 )
 
@@ -84,7 +85,7 @@ func (c *menuCache) getP(key string, apply func(m *Menu), menuLists ...Menu) (Me
 	}
 
 	m := menuLists[0]
-	menuCopy := append(Menu(nil), m...)
+	menuCopy := slices.Clone(m)
 
 	if apply != nil {
 		apply(&menuCopy)
@@ -23,7 +23,7 @@ import (
 
func createSortTestMenu(num int) Menu {
 	menu := make(Menu, num)
-	for i := 0; i < num; i++ {
+	for i := range num {
 		m := &MenuEntry{}
 		menu[i] = m
 	}
@@ -49,11 +49,11 @@ func TestMenuCache(t *testing.T) {
 
 	var testMenuSets []Menu
 
-	for i := 0; i < 50; i++ {
+	for i := range 50 {
 		testMenuSets = append(testMenuSets, createSortTestMenu(i+1))
 	}
 
-	for j := 0; j < 100; j++ {
+	for range 100 {
 		wg.Add(1)
 		go func() {
 			defer wg.Done()
@@ -321,7 +321,7 @@ func uniqueStringsReuse(s []string) []string {
	for i, val := range s {
 		var seen bool
 
-		for j := 0; j < i; j++ {
+		for j := range i {
 			if s[j] == val {
 				seen = true
 				break
@@ -99,7 +99,7 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 
	if c.OmitEmpty {
 		// It's tricky to do this with a regexp, so convert it to a map, remove zero values and convert back.
-		var m map[string]interface{}
+		var m map[string]any
 		err = json.Unmarshal(converted, &m)
 		if err != nil {
 			return nil, err
@@ -111,9 +111,9 @@ func (c ReplacingJSONMarshaller) MarshalJSON() ([]byte, error) {
 				delete(m, k)
 			} else {
 				switch vv := v.(type) {
-				case map[string]interface{}:
+				case map[string]any:
 					removeZeroVAlues(vv)
-				case []interface{}:
+				case []any:
 					for _, vvv := range vv {
 						if m, ok := vvv.(map[string]any); ok {
 							removeZeroVAlues(m)
@@ -123,7 +123,7 @@ LOOP:
 
// Handle YAML or TOML front matter.
func (l *pageLexer) lexFrontMatterSection(tp ItemType, delimr rune, name string, delim []byte) stateFunc {
-	for i := 0; i < 2; i++ {
+	for range 2 {
 		if r := l.next(); r != delimr {
 			return l.errorf("invalid %s delimiter", name)
 		}
@@ -192,7 +192,7 @@ func (t *Iterator) PeekWalk(walkFn func(item Item) bool) {
// Consume is a convenience method to consume the next n tokens,
// but back off Errors and EOF.
func (t *Iterator) Consume(cnt int) {
-	for i := 0; i < cnt; i++ {
+	for range cnt {
 		token := t.Next()
 		if token.Type == tError || token.Type == tEOF {
 			t.Backup()
@@ -292,7 +292,7 @@ func (r *rank) addWeight(w int) {
 }
 
 var rankPool = sync.Pool{
-	New: func() interface{} {
+	New: func() any {
 		return &rank{}
 	},
 }
@@ -433,7 +433,7 @@ func (cfg IndexConfig) ToKeywords(v any) ([]Keyword, error) {
 		keywords = append(keywords, cfg.stringToKeyword(vv))
 	case []string:
 		vvv := make([]Keyword, len(vv))
-		for i := 0; i < len(vvv); i++ {
+		for i := range vvv {
 			vvv[i] = cfg.stringToKeyword(vv[i])
 		}
 		keywords = append(keywords, vvv...)
@@ -623,7 +623,7 @@ type Keyword interface {
func (cfg IndexConfig) StringsToKeywords(s ...string) []Keyword {
 	kw := make([]Keyword, len(s))
 
-	for i := 0; i < len(s); i++ {
+	for i := range s {
 		kw[i] = cfg.stringToKeyword(s[i])
 	}
 
@@ -65,7 +65,7 @@ func (d *testDoc) addKeywords(name string, keywords ...string) *testDoc {
 
	for k, v := range keywordm {
 		keywords := make([]Keyword, len(v))
-		for i := 0; i < len(v); i++ {
+		for i := range v {
 			keywords[i] = StringKeyword(v[i])
 		}
 		d.keywords[k] = keywords
@@ -221,7 +221,7 @@ func TestSearch(t *testing.T) {
 		doc := newTestDocWithDate("keywords", date, "a", "b")
 		doc.name = "thedoc"
 
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			docc := *doc
 			docc.name = fmt.Sprintf("doc%d", i)
 			idx.Add(context.Background(), &docc)
@@ -230,7 +230,7 @@ func TestSearch(t *testing.T) {
 		m, err := idx.Search(context.Background(), SearchOpts{Document: doc, Indices: []string{"keywords"}})
 		c.Assert(err, qt.IsNil)
 		c.Assert(len(m), qt.Equals, 10)
-		for i := 0; i < 10; i++ {
+		for i := range 10 {
 			c.Assert(m[i].Name(), qt.Equals, fmt.Sprintf("doc%d", i))
 		}
 	})
@@ -311,11 +311,11 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
	pages := make([]*testDoc, 100)
 	numkeywords := 30
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
 
-	for i := 0; i < len(pages); i++ {
+	for i := range pages {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {
@@ -356,7 +356,7 @@ func BenchmarkRelatedNewIndex(b *testing.B) {
	for i := 0; i < b.N; i++ {
 		idx := NewInvertedIndex(cfg)
 		docs := make([]Document, len(pages))
-		for i := 0; i < len(pages); i++ {
+		for i := range pages {
 			docs[i] = pages[i]
 		}
 		idx.Add(context.Background(), docs...)
@@ -372,7 +372,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
	docs := make([]*testDoc, 1000)
 	numkeywords := 20
 	allKeywords := make([]string, numkeywords)
-	for i := 0; i < numkeywords; i++ {
+	for i := range numkeywords {
 		allKeywords[i] = fmt.Sprintf("keyword%d", i+1)
 	}
 
@@ -386,7 +386,7 @@ func BenchmarkRelatedMatchesIn(b *testing.B) {
 
 	idx := NewInvertedIndex(cfg)
 
-	for i := 0; i < len(docs); i++ {
+	for i := range docs {
 		start := rand.Intn(len(allKeywords))
 		end := start + 3
 		if end >= len(allKeywords) {
@@ -160,7 +160,7 @@ keywords: ['k%d']
---
`
 
-	for i := 0; i < 32; i++ {
+	for range 32 {
 		base += fmt.Sprintf("\n## Title %d", rand.Intn(100))
 	}
 
@ -230,10 +230,10 @@ func git(args ...string) (string, error) {
|
|||
return string(out), nil
|
||||
}
|
||||
|
||||
func logf(format string, args ...interface{}) {
|
||||
func logf(format string, args ...any) {
|
||||
fmt.Fprintf(os.Stderr, format, args...)
|
||||
}
|
||||
|
||||
func logln(args ...interface{}) {
|
||||
func logln(args ...any) {
|
||||
fmt.Fprintln(os.Stderr, args...)
|
||||
}
|
||||
|
|
|
@@ -348,13 +348,13 @@ func TestImageTransformConcurrent(t *testing.T) {
 
	image := fetchImageForSpec(spec, c, "sunset.jpg")
 
-	for i := 0; i < 4; i++ {
+	for i := range 4 {
 		wg.Add(1)
 		go func(id int) {
 			defer wg.Done()
-			for j := 0; j < 5; j++ {
+			for j := range 5 {
 				img := image
-				for k := 0; k < 2; k++ {
+				for k := range 2 {
 					r1, err := img.Resize(fmt.Sprintf("%dx", id-k))
 					if err != nil {
 						t.Error(err)
@@ -499,7 +499,7 @@ func BenchmarkImageExif(b *testing.B) {
 
 	b.StartTimer()
 	for i := 0; i < b.N; i++ {
-		for j := 0; j < 10; j++ {
+		for range 10 {
 			getAndCheckExif(c, images[i])
 		}
 	}
@@ -22,6 +22,7 @@ import (
 	"strings"
 
 	"github.com/gohugoio/hugo/common/hstrings"
+	"slices"
 )
 
 type colorGoProvider interface {
@@ -91,11 +92,8 @@ func (c Color) toSRGB(i uint8) float64 {
// that the palette is valid for the relevant format.
func AddColorToPalette(c color.Color, p color.Palette) color.Palette {
 	var found bool
-	for _, cc := range p {
-		if c == cc {
-			found = true
-			break
-		}
+	if slices.Contains(p, c) {
+		found = true
 	}
 
 	if !found {
@@ -209,7 +209,7 @@ func goldenEqual(img1, img2 *image.NRGBA) bool {
	if len(img1.Pix) != len(img2.Pix) {
 		return false
 	}
-	for i := 0; i < len(img1.Pix); i++ {
+	for i := range img1.Pix {
 		diff := int(img1.Pix[i]) - int(img2.Pix[i])
 		if diff < 0 {
 			diff = -diff
@@ -24,6 +24,7 @@ import (
 	"github.com/gohugoio/hugo/hugofs/glob"
 	"github.com/gohugoio/hugo/resources/kinds"
 	"github.com/mitchellh/mapstructure"
+	"slices"
 )
 
 // A PageMatcher can be used to match a Page with Glob patterns.
@@ -208,13 +209,7 @@ func decodePageMatcher(m any, v *PageMatcher) error {
	v.Kind = strings.ToLower(v.Kind)
 	if v.Kind != "" {
 		g, _ := glob.GetGlob(v.Kind)
-		found := false
-		for _, k := range kinds.AllKindsInPages {
-			if g.Match(k) {
-				found = true
-				break
-			}
-		}
+		found := slices.ContainsFunc(kinds.AllKindsInPages, g.Match)
 		if !found {
 			return fmt.Errorf("%q did not match a valid Page Kind", v.Kind)
 		}
(Some files were not shown because too many files have changed in this diff.)
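To reproduce a commit like this: the modernize analyzer ships with gopls in golang.org/x/tools, and at the time of writing its documented standalone invocation is roughly the following (command path taken from the upstream x/tools documentation, not from this page):

go run golang.org/x/tools/gopls/internal/analysis/modernize/cmd/modernize@latest -fix -test ./...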