Compare commits


No commits in common. "master" and "v0.137.1" have entirely different histories.

2186 changed files with 40701 additions and 43523 deletions


@ -4,7 +4,7 @@ parameters:
defaults: &defaults defaults: &defaults
resource_class: large resource_class: large
docker: docker:
- image: bepsays/ci-hugoreleaser:1.22400.20000 - image: bepsays/ci-hugoreleaser:1.22300.20200
environment: &buildenv environment: &buildenv
GOMODCACHE: /root/project/gomodcache GOMODCACHE: /root/project/gomodcache
version: 2 version: 2
@ -58,7 +58,7 @@ jobs:
environment: environment:
<<: [*buildenv] <<: [*buildenv]
docker: docker:
- image: bepsays/ci-hugoreleaser-linux-arm64:1.22400.20000 - image: bepsays/ci-hugoreleaser-linux-arm64:1.22300.20200
steps: steps:
- *restore-cache - *restore-cache
- &attach-workspace - &attach-workspace


@ -46,4 +46,3 @@ jobs:
platforms: linux/amd64,linux/arm64 platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }} tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }} labels: ${{ steps.meta.outputs.labels }}
build-args: HUGO_BUILD_TAGS=extended,withdeploy


@ -16,7 +16,7 @@ jobs:
test: test:
strategy: strategy:
matrix: matrix:
go-version: [1.23.x, 1.24.x] go-version: [1.22.x, 1.23.x]
os: [ubuntu-latest, windows-latest] # macos disabled for now because of disk space issues. os: [ubuntu-latest, windows-latest] # macos disabled for now because of disk space issues.
runs-on: ${{ matrix.os }} runs-on: ${{ matrix.os }}
steps: steps:

.gitignore (1 changed line)

@ -3,4 +3,3 @@
imports.* imports.*
dist/ dist/
public/ public/
.DS_Store


@ -1,4 +1,4 @@
>**Note:** We would appreciate if you hold on with any big refactoring (like renaming deprecated Go packages), mainly because of potential for extra merge work for future coming in in the near future. >**Note:** We would apprecitate if you hold on with any big refactorings (like renaming deprecated Go packages), mainly because of potential for extra merge work for future coming in in the near future.
# Contributing to Hugo # Contributing to Hugo
@ -93,7 +93,6 @@ Most title/subjects should have a lower-cased prefix with a colon and one whites
* If this commit touches many packages without a common functional topic, prefix with `all:` (e.g. `all: Reformat Go code`) * If this commit touches many packages without a common functional topic, prefix with `all:` (e.g. `all: Reformat Go code`)
* If this is a documentation update, prefix with `docs:`. * If this is a documentation update, prefix with `docs:`.
* If nothing of the above applies, just leave the prefix out. * If nothing of the above applies, just leave the prefix out.
* Note that the above excludes nouns seen in other repositories, e.g. "chore:".
Also, if your commit references one or more GitHub issues, always end your commit message body with *See #1234* or *Fixes #1234*. Also, if your commit references one or more GitHub issues, always end your commit message body with *See #1234* or *Fixes #1234*.
Replace *1234* with the GitHub issue ID. The last example will close the issue when the commit is merged into *master*. Replace *1234* with the GitHub issue ID. The last example will close the issue when the commit is merged into *master*.
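To make the conventions above concrete, a commit subject and body following them might look like this (the package prefix, wording, and issue number are made up for illustration):

```text
tpl/strings: Add a test for substr edge cases

Fixes #1234
```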


@ -2,8 +2,8 @@
# Twitter: https://twitter.com/gohugoio # Twitter: https://twitter.com/gohugoio
# Website: https://gohugo.io/ # Website: https://gohugo.io/
ARG GO_VERSION="1.24" ARG GO_VERSION="1.23.2"
ARG ALPINE_VERSION="3.22" ARG ALPINE_VERSION="3.20"
ARG DART_SASS_VERSION="1.79.3" ARG DART_SASS_VERSION="1.79.3"
FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.5.0 AS xx FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.5.0 AS xx

README.md (201 changed lines)

@ -34,7 +34,6 @@ A fast and flexible static site generator built with love by [bep], [spf13], and
[![Go Report Card](https://goreportcard.com/badge/github.com/gohugoio/hugo)](https://goreportcard.com/report/github.com/gohugoio/hugo) [![Go Report Card](https://goreportcard.com/badge/github.com/gohugoio/hugo)](https://goreportcard.com/report/github.com/gohugoio/hugo)
[Website] | [Installation] | [Documentation] | [Support] | [Contributing] | <a rel="me" href="https://fosstodon.org/@gohugoio">Mastodon</a> [Website] | [Installation] | [Documentation] | [Support] | [Contributing] | <a rel="me" href="https://fosstodon.org/@gohugoio">Mastodon</a>
## Overview ## Overview
Hugo is a [static site generator] written in [Go], optimized for speed and designed for flexibility. With its advanced templating system and fast asset pipelines, Hugo renders a complete site in seconds, often less. Hugo is a [static site generator] written in [Go], optimized for speed and designed for flexibility. With its advanced templating system and fast asset pipelines, Hugo renders a complete site in seconds, often less.
@ -65,30 +64,13 @@ See the [features] section of the documentation for a comprehensive summary of H
<p>&nbsp;</p> <p>&nbsp;</p>
<p float="left"> <p float="left">
<a href="https://www.linode.com/?utm_campaign=hugosponsor&utm_medium=banner&utm_source=hugogithub" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/linode-logo_standard_light_medium.png" width="200" alt="Linode"></a> <a href="https://www.linode.com/?utm_campaign=hugosponsor&utm_medium=banner&utm_source=hugogithub" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/assets/images/sponsors/linode-logo_standard_light_medium.png" width="200" alt="Linode"></a>
&nbsp;&nbsp;&nbsp; &nbsp;&nbsp;&nbsp;
<a href="https://www.jetbrains.com/go/?utm_source=OSS&utm_medium=referral&utm_campaign=hugo" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/goland.svg" width="200" alt="The complete IDE crafted for professional Go developers."></a> <a href="https://route4me.com/" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/assets/images/sponsors/Route4MeLogoBlueOnWhite.svg" width="200" alt="Route Planning & Route Optimization Software"></a>
&nbsp;&nbsp;&nbsp; &nbsp;&nbsp;&nbsp;
<a href="https://pinme.eth.limo/?s=hugo" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/logo-pinme.svg" width="200" alt="PinMe."></a> <a href="https://www.jetbrains.com/go/?utm_source=OSS&utm_medium=referral&utm_campaign=hugo" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/assets/images/sponsors/goland.svg" width="200" alt="The complete IDE crafted for professional Go developers."></a>
</p> </p>
## Editions
Hugo is available in three editions: standard, extended, and extended/deploy. While the standard edition provides core functionality, the extended and extended/deploy editions offer advanced features.
Feature|extended edition|extended/deploy edition
:--|:-:|:-:
Encode to the WebP format when [processing images]. You can decode WebP images with any edition.|:heavy_check_mark:|:heavy_check_mark:
[Transpile Sass to CSS] using the embedded LibSass transpiler. You can use the [Dart Sass] transpiler with any edition.|:heavy_check_mark:|:heavy_check_mark:
Deploy your site directly to a Google Cloud Storage bucket, an AWS S3 bucket, or an Azure Storage container. See&nbsp;[details].|:x:|:heavy_check_mark:
[dart sass]: https://gohugo.io/functions/css/sass/#dart-sass
[processing images]: https://gohugo.io/content-management/image-processing/
[transpile sass to css]: https://gohugo.io/functions/css/sass/
[details]: https://gohugo.io/hosting-and-deployment/hugo-deploy/
Unless your specific deployment needs require the extended/deploy edition, we recommend the extended edition.
## Installation ## Installation
Install Hugo from a [prebuilt binary], package manager, or package repository. Please see the installation instructions for your operating system: Install Hugo from a [prebuilt binary], package manager, or package repository. Please see the installation instructions for your operating system:
@ -100,11 +82,15 @@ Install Hugo from a [prebuilt binary], package manager, or package repository. P
## Build from source ## Build from source
Hugo is available in two editions: standard and extended. With the extended edition you can:
- Encode to the WebP format when processing images. You can decode WebP images with either edition.
- Transpile Sass to CSS using the embedded LibSass transpiler. The extended edition is not required to use the Dart Sass transpiler.
Prerequisites to build Hugo from source: Prerequisites to build Hugo from source:
- Standard edition: Go 1.23.0 or later - Standard edition: Go 1.20 or later
- Extended edition: Go 1.23.0 or later, and GCC - Extended edition: Go 1.20 or later, and GCC
- Extended/deploy edition: Go 1.23.0 or later, and GCC
Build the standard edition: Build the standard edition:
@ -117,13 +103,6 @@ Build the extended edition:
```text ```text
CGO_ENABLED=1 go install -tags extended github.com/gohugoio/hugo@latest CGO_ENABLED=1 go install -tags extended github.com/gohugoio/hugo@latest
``` ```
Build the extended/deploy edition:
```text
CGO_ENABLED=1 go install -tags extended,withdeploy github.com/gohugoio/hugo@latest
```
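As a quick, informal check after building from source, `hugo version` reports which edition was compiled (extended builds include `+extended` in the version string):

```text
hugo version
# extended builds include "+extended" in the reported version string
```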
## Star History ## Star History
[![Star History Chart](https://api.star-history.com/svg?repos=gohugoio/hugo&type=Timeline)](https://star-history.com/#gohugoio/hugo&Timeline) [![Star History Chart](https://api.star-history.com/svg?repos=gohugoio/hugo&type=Timeline)](https://star-history.com/#gohugoio/hugo&Timeline)
@ -170,113 +149,153 @@ Hugo stands on the shoulders of great open source libraries. Run `hugo env --log
<summary>See current dependencies</summary> <summary>See current dependencies</summary>
```text ```text
cloud.google.com/go/compute/metadata="v0.2.3"
cloud.google.com/go/iam="v1.1.5"
cloud.google.com/go/storage="v1.35.1"
cloud.google.com/go="v0.110.10"
github.com/Azure/azure-sdk-for-go/sdk/azcore="v1.9.0"
github.com/Azure/azure-sdk-for-go/sdk/azidentity="v1.4.0"
github.com/Azure/azure-sdk-for-go/sdk/internal="v1.5.0"
github.com/Azure/azure-sdk-for-go/sdk/storage/azblob="v1.2.0"
github.com/Azure/go-autorest/autorest/to="v0.4.0"
github.com/AzureAD/microsoft-authentication-library-for-go="v1.2.0"
github.com/BurntSushi/locker="v0.0.0-20171006230638-a6e239ea1c69" github.com/BurntSushi/locker="v0.0.0-20171006230638-a6e239ea1c69"
github.com/PuerkitoBio/goquery="v1.10.1" github.com/alecthomas/chroma/v2="v2.14.0"
github.com/alecthomas/chroma/v2="v2.15.0"
github.com/andybalholm/cascadia="v1.3.3"
github.com/armon/go-radix="v1.0.1-0.20221118154546-54df44f2176c" github.com/armon/go-radix="v1.0.1-0.20221118154546-54df44f2176c"
github.com/aws/aws-sdk-go-v2/aws/protocol/eventstream="v1.5.4"
github.com/aws/aws-sdk-go-v2/config="v1.26.1"
github.com/aws/aws-sdk-go-v2/credentials="v1.16.12"
github.com/aws/aws-sdk-go-v2/feature/ec2/imds="v1.14.10"
github.com/aws/aws-sdk-go-v2/feature/s3/manager="v1.15.7"
github.com/aws/aws-sdk-go-v2/internal/configsources="v1.3.5"
github.com/aws/aws-sdk-go-v2/internal/endpoints/v2="v2.6.5"
github.com/aws/aws-sdk-go-v2/internal/ini="v1.7.2"
github.com/aws/aws-sdk-go-v2/internal/v4a="v1.2.9"
github.com/aws/aws-sdk-go-v2/service/cloudfront="v1.35.4"
github.com/aws/aws-sdk-go-v2/service/internal/accept-encoding="v1.10.4"
github.com/aws/aws-sdk-go-v2/service/internal/checksum="v1.2.9"
github.com/aws/aws-sdk-go-v2/service/internal/presigned-url="v1.10.9"
github.com/aws/aws-sdk-go-v2/service/internal/s3shared="v1.16.9"
github.com/aws/aws-sdk-go-v2/service/s3="v1.47.5"
github.com/aws/aws-sdk-go-v2/service/sso="v1.18.5"
github.com/aws/aws-sdk-go-v2/service/ssooidc="v1.21.5"
github.com/aws/aws-sdk-go-v2/service/sts="v1.26.5"
github.com/aws/aws-sdk-go-v2="v1.26.1"
github.com/aws/aws-sdk-go="v1.50.7"
github.com/aws/smithy-go="v1.20.2"
github.com/bep/clocks="v0.5.0" github.com/bep/clocks="v0.5.0"
github.com/bep/debounce="v1.2.0" github.com/bep/debounce="v1.2.0"
github.com/bep/gitmap="v1.6.0" github.com/bep/gitmap="v1.1.2"
github.com/bep/goat="v0.5.0" github.com/bep/goat="v0.5.0"
github.com/bep/godartsass/v2="v2.3.2" github.com/bep/godartsass/v2="v2.0.0"
github.com/bep/golibsass="v1.2.0" github.com/bep/godartsass="v1.2.0"
github.com/bep/golibsass="v1.1.1"
github.com/bep/gowebp="v0.3.0" github.com/bep/gowebp="v0.3.0"
github.com/bep/imagemeta="v0.8.4" github.com/bep/lazycache="v0.4.0"
github.com/bep/lazycache="v0.7.0"
github.com/bep/logg="v0.4.0" github.com/bep/logg="v0.4.0"
github.com/bep/mclib="v1.20400.20402" github.com/bep/mclib="v1.20400.20402"
github.com/bep/overlayfs="v0.9.2" github.com/bep/overlayfs="v0.9.2"
github.com/bep/simplecobra="v0.5.0" github.com/bep/simplecobra="v0.4.0"
github.com/bep/tmc="v0.5.1" github.com/bep/tmc="v0.5.1"
github.com/cespare/xxhash/v2="v2.3.0"
github.com/clbanning/mxj/v2="v2.7.0" github.com/clbanning/mxj/v2="v2.7.0"
github.com/cpuguy83/go-md2man/v2="v2.0.4" github.com/cli/safeexec="v1.0.1"
github.com/cpuguy83/go-md2man/v2="v2.0.3"
github.com/disintegration/gift="v1.2.1" github.com/disintegration/gift="v1.2.1"
github.com/dlclark/regexp2="v1.11.5" github.com/dlclark/regexp2="v1.11.0"
github.com/dop251/goja="v0.0.0-20250125213203-5ef83b82af17" github.com/dustin/go-humanize="v1.0.1"
github.com/evanw/esbuild="v0.24.2" github.com/evanw/esbuild="v0.21.4"
github.com/fatih/color="v1.18.0" github.com/fatih/color="v1.16.0"
github.com/frankban/quicktest="v1.14.6" github.com/frankban/quicktest="v1.14.6"
github.com/fsnotify/fsnotify="v1.8.0" github.com/fsnotify/fsnotify="v1.7.0"
github.com/getkin/kin-openapi="v0.129.0" github.com/getkin/kin-openapi="v0.123.0"
github.com/ghodss/yaml="v1.0.0" github.com/ghodss/yaml="v1.0.0"
github.com/go-openapi/jsonpointer="v0.21.0" github.com/go-openapi/jsonpointer="v0.20.2"
github.com/go-openapi/swag="v0.23.0" github.com/go-openapi/swag="v0.22.8"
github.com/go-sourcemap/sourcemap="v2.1.4+incompatible" github.com/gobuffalo/flect="v1.0.2"
github.com/gobuffalo/flect="v1.0.3"
github.com/gobwas/glob="v0.2.3" github.com/gobwas/glob="v0.2.3"
github.com/gohugoio/go-i18n/v2="v2.1.3-0.20230805085216-e63c13218d0e" github.com/gohugoio/go-i18n/v2="v2.1.3-0.20230805085216-e63c13218d0e"
github.com/gohugoio/hashstructure="v0.5.0"
github.com/gohugoio/httpcache="v0.7.0" github.com/gohugoio/httpcache="v0.7.0"
github.com/gohugoio/hugo-goldmark-extensions/extras="v0.2.0" github.com/gohugoio/hugo-goldmark-extensions/extras="v0.2.0"
github.com/gohugoio/hugo-goldmark-extensions/passthrough="v0.3.0" github.com/gohugoio/hugo-goldmark-extensions/passthrough="v0.2.0"
github.com/gohugoio/locales="v0.14.0" github.com/gohugoio/locales="v0.14.0"
github.com/gohugoio/localescompressed="v1.0.1" github.com/gohugoio/localescompressed="v1.0.1"
github.com/golang/freetype="v0.0.0-20170609003504-e2365dfdc4a0" github.com/golang-jwt/jwt/v5="v5.1.0"
github.com/golang/groupcache="v0.0.0-20210331224755-41bb18bfe9da"
github.com/golang/protobuf="v1.5.3"
github.com/google/go-cmp="v0.6.0" github.com/google/go-cmp="v0.6.0"
github.com/google/pprof="v0.0.0-20250208200701-d0013a598941" github.com/google/s2a-go="v0.1.7"
github.com/gorilla/websocket="v1.5.3" github.com/google/uuid="v1.4.0"
github.com/hairyhenderson/go-codeowners="v0.7.0" github.com/google/wire="v0.5.0"
github.com/googleapis/enterprise-certificate-proxy="v0.3.2"
github.com/googleapis/gax-go/v2="v2.12.0"
github.com/gorilla/websocket="v1.5.1"
github.com/hairyhenderson/go-codeowners="v0.4.0"
github.com/hashicorp/golang-lru/v2="v2.0.7" github.com/hashicorp/golang-lru/v2="v2.0.7"
github.com/invopop/yaml="v0.2.0"
github.com/jdkato/prose="v1.2.1" github.com/jdkato/prose="v1.2.1"
github.com/jmespath/go-jmespath="v0.4.0"
github.com/josharian/intern="v1.0.0" github.com/josharian/intern="v1.0.0"
github.com/kr/pretty="v0.3.1" github.com/kr/pretty="v0.3.1"
github.com/kr/text="v0.2.0" github.com/kr/text="v0.2.0"
github.com/kyokomi/emoji/v2="v2.2.13" github.com/kylelemons/godebug="v1.1.0"
github.com/lucasb-eyer/go-colorful="v1.2.0" github.com/kyokomi/emoji/v2="v2.2.12"
github.com/mailru/easyjson="v0.7.7" github.com/mailru/easyjson="v0.7.7"
github.com/makeworld-the-better-one/dither/v2="v2.4.0" github.com/makeworld-the-better-one/dither/v2="v2.4.0"
github.com/marekm4/color-extractor="v1.2.1" github.com/marekm4/color-extractor="v1.2.1"
github.com/mattn/go-colorable="v0.1.13" github.com/mattn/go-colorable="v0.1.13"
github.com/mattn/go-isatty="v0.0.20" github.com/mattn/go-isatty="v0.0.20"
github.com/mattn/go-runewidth="v0.0.9" github.com/mattn/go-runewidth="v0.0.9"
github.com/mazznoer/csscolorparser="v0.1.5" github.com/mitchellh/hashstructure="v1.1.0"
github.com/mitchellh/mapstructure="v1.5.1-0.20231216201459-8508981c8b6c" github.com/mitchellh/mapstructure="v1.5.1-0.20231216201459-8508981c8b6c"
github.com/mohae/deepcopy="v0.0.0-20170929034955-c48cc78d4826" github.com/mohae/deepcopy="v0.0.0-20170929034955-c48cc78d4826"
github.com/muesli/smartcrop="v0.3.0" github.com/muesli/smartcrop="v0.3.0"
github.com/niklasfasching/go-org="v1.7.0" github.com/niklasfasching/go-org="v1.7.0"
github.com/oasdiff/yaml3="v0.0.0-20241210130736-a94c01f36349"
github.com/oasdiff/yaml="v0.0.0-20241210131133-6b86fb107d80"
github.com/olekukonko/tablewriter="v0.0.5" github.com/olekukonko/tablewriter="v0.0.5"
github.com/pbnjay/memory="v0.0.0-20210728143218-7b4eea64cf58" github.com/pbnjay/memory="v0.0.0-20210728143218-7b4eea64cf58"
github.com/pelletier/go-toml/v2="v2.2.3" github.com/pelletier/go-toml/v2="v2.2.2"
github.com/perimeterx/marshmallow="v1.1.5" github.com/perimeterx/marshmallow="v1.1.5"
github.com/pkg/browser="v0.0.0-20240102092130-5ac0b6a4141c" github.com/pkg/browser="v0.0.0-20210911075715-681adbf594b8"
github.com/pkg/errors="v0.9.1" github.com/pkg/errors="v0.9.1"
github.com/rivo/uniseg="v0.4.7" github.com/rogpeppe/go-internal="v1.12.0"
github.com/rogpeppe/go-internal="v1.13.1"
github.com/russross/blackfriday/v2="v2.1.0" github.com/russross/blackfriday/v2="v2.1.0"
github.com/sass/libsass="3.6.6" github.com/rwcarlsen/goexif="v0.0.0-20190401172101-9e8deecbddbd"
github.com/sass/dart-sass/compiler="1.77.5"
github.com/sass/dart-sass/implementation="1.77.5"
github.com/sass/dart-sass/protocol="2.7.1"
github.com/sass/libsass="3.6.5"
github.com/spf13/afero="v1.11.0" github.com/spf13/afero="v1.11.0"
github.com/spf13/cast="v1.7.1" github.com/spf13/cast="v1.6.0"
github.com/spf13/cobra="v1.8.1" github.com/spf13/cobra="v1.8.0"
github.com/spf13/fsync="v0.10.1" github.com/spf13/fsync="v0.10.1"
github.com/spf13/pflag="v1.0.6" github.com/spf13/pflag="v1.0.5"
github.com/tdewolff/minify/v2="v2.20.37" github.com/tdewolff/minify/v2="v2.20.20"
github.com/tdewolff/parse/v2="v2.7.15" github.com/tdewolff/parse/v2="v2.7.13"
github.com/tetratelabs/wazero="v1.8.2"
github.com/webmproject/libwebp="v1.3.2" github.com/webmproject/libwebp="v1.3.2"
github.com/yuin/goldmark-emoji="v1.0.4" github.com/yuin/goldmark-emoji="v1.0.3"
github.com/yuin/goldmark="v1.7.8" github.com/yuin/goldmark="v1.7.4"
go.opencensus.io="v0.24.0"
go.uber.org/automaxprocs="v1.5.3" go.uber.org/automaxprocs="v1.5.3"
golang.org/x/crypto="v0.33.0" gocloud.dev="v0.36.0"
golang.org/x/exp="v0.0.0-20250210185358-939b2ce775ac" golang.org/x/crypto="v0.23.0"
golang.org/x/image="v0.24.0" golang.org/x/exp="v0.0.0-20221031165847-c99f073a8326"
golang.org/x/mod="v0.23.0" golang.org/x/image="v0.16.0"
golang.org/x/net="v0.35.0" golang.org/x/mod="v0.17.0"
golang.org/x/sync="v0.11.0" golang.org/x/net="v0.25.0"
golang.org/x/sys="v0.30.0" golang.org/x/oauth2="v0.15.0"
golang.org/x/text="v0.22.0" golang.org/x/sync="v0.7.0"
golang.org/x/tools="v0.30.0" golang.org/x/sys="v0.20.0"
golang.org/x/xerrors="v0.0.0-20240903120638-7835f813f4da" golang.org/x/text="v0.15.0"
gonum.org/v1/plot="v0.15.0" golang.org/x/time="v0.5.0"
google.golang.org/protobuf="v1.36.5" golang.org/x/tools="v0.20.0"
golang.org/x/xerrors="v0.0.0-20231012003039-104605ab7028"
google.golang.org/api="v0.152.0"
google.golang.org/genproto/googleapis/api="v0.0.0-20231120223509-83a465c0220f"
google.golang.org/genproto/googleapis/rpc="v0.0.0-20231120223509-83a465c0220f"
google.golang.org/genproto="v0.0.0-20231120223509-83a465c0220f"
google.golang.org/grpc="v1.59.0"
google.golang.org/protobuf="v1.33.0"
gopkg.in/yaml.v2="v2.4.0" gopkg.in/yaml.v2="v2.4.0"
gopkg.in/yaml.v3="v3.0.1" gopkg.in/yaml.v3="v3.0.1"
oss.terrastruct.com/d2="v0.6.9"
oss.terrastruct.com/util-go="v0.0.0-20241005222610-44c011a04896"
rsc.io/qr="v0.2.0"
software.sslmate.com/src/go-pkcs12="v0.2.0" software.sslmate.com/src/go-pkcs12="v0.2.0"
``` ```
</details> </details>

bench.sh (new executable file, 37 lines)

@ -0,0 +1,37 @@
#!/usr/bin/env bash
# allow user to override go executable by running as GOEXE=xxx make ...
GOEXE="${GOEXE-go}"
# Convenience script to
# - For a given branch
# - Run benchmark tests for a given package
# - Do the same for master
# - then compare the two runs with benchcmp
benchFilter=".*"
if (( $# < 2 ));
then
echo "USAGE: ./bench.sh <git-branch> <package-to-bench> (and <benchmark filter> (regexp, optional))"
exit 1
fi
if [ $# -eq 3 ]; then
benchFilter=$3
fi
BRANCH=$1
PACKAGE=$2
git checkout $BRANCH
"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-$BRANCH.txt
git checkout master
"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-master.txt
benchcmp /tmp/bench-$PACKAGE-master.txt /tmp/bench-$PACKAGE-$BRANCH.txt
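Following the USAGE line above, a typical run compares one package's benchmarks on a feature branch against master; the branch name and filter here are placeholders:

```text
./bench.sh my-feature-branch hugolib "BenchmarkSiteNew"
```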

benchSite.sh (new executable file, 12 lines)

@ -0,0 +1,12 @@
#!/bin/bash
# allow user to override go executable by running as GOEXE=xxx make ...
GOEXE="${GOEXE-go}"
# Send in a regexp matching the benchmarks you want to run, i.e. './benchSite.sh "YAML"'.
# Note the quotes, which will be needed for more complex expressions.
# The above will run all variations, but only for front matter YAML.
echo "Running with BenchmarkSiteBuilding/${1}"
"${GOEXE}" test -run="NONE" -bench="BenchmarkSiteBuilding/${1}" -test.benchmem=true ./hugolib -memprofile mem.prof -count 3 -cpuprofile cpu.prof

benchbep.sh (new executable file, 1 line)

@ -0,0 +1 @@
gobench -package=./hugolib -bench="BenchmarkSiteNew/Deep_content_tree"

bepdock.sh (new executable file, 1 line)

@ -0,0 +1 @@
docker run --rm --mount type=bind,source="$(pwd)",target=/hugo -w /hugo -i -t bepsays/ci-goreleaser:1.11-2 /bin/bash


@ -176,12 +176,11 @@ func (c *Cache) ClearMatching(predicatePartition func(k string, p PartitionManag
} }
// ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account. // ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account.
// predicate is optional and will clear any entry for which it returns true. func (c *Cache) ClearOnRebuild(changeset ...identity.Identity) {
func (c *Cache) ClearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity) {
g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{ g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
NumWorkers: len(c.partitions), NumWorkers: len(c.partitions),
Handle: func(ctx context.Context, partition PartitionManager) error { Handle: func(ctx context.Context, partition PartitionManager) error {
partition.clearOnRebuild(predicate, changeset...) partition.clearOnRebuild(changeset...)
return nil return nil
}, },
}) })
@ -480,12 +479,7 @@ func (p *Partition[K, V]) clearMatching(predicate func(k, v any) bool) {
}) })
} }
func (p *Partition[K, V]) clearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity) { func (p *Partition[K, V]) clearOnRebuild(changeset ...identity.Identity) {
if predicate == nil {
predicate = func(k, v any) bool {
return false
}
}
opts := p.getOptions() opts := p.getOptions()
if opts.ClearWhen == ClearNever { if opts.ClearWhen == ClearNever {
return return
@ -531,7 +525,7 @@ func (p *Partition[K, V]) clearOnRebuild(predicate func(k, v any) bool, changese
// Second pass needs to be done in a separate loop to catch any // Second pass needs to be done in a separate loop to catch any
// elements marked as stale in the other partitions. // elements marked as stale in the other partitions.
p.c.DeleteFunc(func(key K, v V) bool { p.c.DeleteFunc(func(key K, v V) bool {
if predicate(key, v) || shouldDelete(key, v) { if shouldDelete(key, v) {
p.trace.Log( p.trace.Log(
logg.StringFunc( logg.StringFunc(
func() string { func() string {
@ -607,7 +601,7 @@ type PartitionManager interface {
adjustMaxSize(addend int) int adjustMaxSize(addend int) int
getMaxSize() int getMaxSize() int
getOptions() OptionsPartition getOptions() OptionsPartition
clearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity) clearOnRebuild(changeset ...identity.Identity)
clearMatching(predicate func(k, v any) bool) clearMatching(predicate func(k, v any) bool)
clearStale() clearStale()
} }


@ -147,13 +147,13 @@ func TestClear(t *testing.T) {
c.Assert(cache.Keys(predicateAll), qt.HasLen, 4) c.Assert(cache.Keys(predicateAll), qt.HasLen, 4)
cache.ClearOnRebuild(nil) cache.ClearOnRebuild()
// Stale items are always cleared. // Stale items are always cleared.
c.Assert(cache.Keys(predicateAll), qt.HasLen, 2) c.Assert(cache.Keys(predicateAll), qt.HasLen, 2)
cache = newTestCache(t) cache = newTestCache(t)
cache.ClearOnRebuild(nil, identity.StringIdentity("changed")) cache.ClearOnRebuild(identity.StringIdentity("changed"))
c.Assert(cache.Keys(nil), qt.HasLen, 1) c.Assert(cache.Keys(nil), qt.HasLen, 1)
@ -191,16 +191,16 @@ func TestPanicInCreate(t *testing.T) {
return err return err
} }
for i := range 3 { for i := 0; i < 3; i++ {
for range 3 { for j := 0; j < 3; j++ {
c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i)) c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i))
c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i)) c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i))
} }
} }
// Test the same keys again without the panic. // Test the same keys again without the panic.
for i := range 3 { for i := 0; i < 3; i++ {
for range 3 { for j := 0; j < 3; j++ {
v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) { v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) {
return testItem{ return testItem{
name: key, name: key,


@ -59,7 +59,7 @@ dir = ":resourceDir/_gen"
caches, err := filecache.NewCaches(p) caches, err := filecache.NewCaches(p)
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
cache := caches[name] cache := caches[name]
for i := range 10 { for i := 0; i < 10; i++ {
id := fmt.Sprintf("i%d", i) id := fmt.Sprintf("i%d", i)
cache.GetOrCreateBytes(id, func() ([]byte, error) { cache.GetOrCreateBytes(id, func() ([]byte, error) {
return []byte("abc"), nil return []byte("abc"), nil
@ -74,7 +74,7 @@ dir = ":resourceDir/_gen"
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(count, qt.Equals, 5, msg) c.Assert(count, qt.Equals, 5, msg)
for i := range 10 { for i := 0; i < 10; i++ {
id := fmt.Sprintf("i%d", i) id := fmt.Sprintf("i%d", i)
v := cache.GetString(id) v := cache.GetString(id)
if i < 5 { if i < 5 {
@ -97,7 +97,7 @@ dir = ":resourceDir/_gen"
c.Assert(count, qt.Equals, 4) c.Assert(count, qt.Equals, 4)
// Now only the i5 should be left. // Now only the i5 should be left.
for i := range 10 { for i := 0; i < 10; i++ {
id := fmt.Sprintf("i%d", i) id := fmt.Sprintf("i%d", i)
v := cache.GetString(id) v := cache.GetString(id)
if i != 5 { if i != 5 {


@ -105,7 +105,7 @@ dir = ":cacheDir/c"
} }
for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} { for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
for range 2 { for i := 0; i < 2; i++ {
info, r, err := ca.GetOrCreate("a", rf("abc")) info, r, err := ca.GetOrCreate("a", rf("abc"))
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(r, qt.Not(qt.IsNil)) c.Assert(r, qt.Not(qt.IsNil))
@ -193,11 +193,11 @@ dir = "/cache/c"
var wg sync.WaitGroup var wg sync.WaitGroup
for i := range 50 { for i := 0; i < 50; i++ {
wg.Add(1) wg.Add(1)
go func(i int) { go func(i int) {
defer wg.Done() defer wg.Done()
for range 20 { for j := 0; j < 20; j++ {
ca := caches.Get(cacheName) ca := caches.Get(cacheName)
c.Assert(ca, qt.Not(qt.IsNil)) c.Assert(ca, qt.Not(qt.IsNil))
filename, data := filenameData(i) filename, data := filenameData(i)


@ -42,7 +42,7 @@ var DefaultConfig = Config{
// Config holds the configuration for the HTTP cache. // Config holds the configuration for the HTTP cache.
type Config struct { type Config struct {
// Configures the HTTP cache behavior (RFC 9111). // Configures the HTTP cache behaviour (RFC 9111).
// When this is not enabled for a resource, Hugo will go straight to the file cache. // When this is not enabled for a resource, Hugo will go straight to the file cache.
Cache Cache Cache Cache
@ -52,7 +52,7 @@ type Config struct {
} }
type Cache struct { type Cache struct {
// Enable HTTP cache behavior (RFC 9111) for these resources. // Enable HTTP cache behaviour (RFC 9111) for these rsources.
For GlobMatcher For GlobMatcher
} }
@ -122,10 +122,6 @@ type GlobMatcher struct {
Includes []string Includes []string
} }
func (gm GlobMatcher) IsZero() bool {
return len(gm.Includes) == 0 && len(gm.Excludes) == 0
}
type ConfigCompiled struct { type ConfigCompiled struct {
For predicate.P[string] For predicate.P[string]
PollConfigs []PollConfigCompiled PollConfigs []PollConfigCompiled
@ -159,9 +155,6 @@ func (p PollConfigCompiled) IsZero() bool {
} }
func (gm *GlobMatcher) CompilePredicate() (func(string) bool, error) { func (gm *GlobMatcher) CompilePredicate() (func(string) bool, error) {
if gm.IsZero() {
panic("no includes or excludes")
}
var p predicate.P[string] var p predicate.P[string]
for _, include := range gm.Includes { for _, include := range gm.Includes {
g, err := glob.Compile(include, '/') g, err := glob.Compile(include, '/')
@ -188,7 +181,7 @@ func (gm *GlobMatcher) CompilePredicate() (func(string) bool, error) {
return p, nil return p, nil
} }
func DecodeConfig(_ config.BaseConfig, m map[string]any) (Config, error) { func DecodeConfig(bcfg config.BaseConfig, m map[string]any) (Config, error) {
if len(m) == 0 { if len(m) == 0 {
return DefaultConfig, nil return DefaultConfig, nil
} }
@ -210,20 +203,5 @@ func DecodeConfig(_ config.BaseConfig, m map[string]any) (Config, error) {
return c, err return c, err
} }
if c.Cache.For.IsZero() {
c.Cache.For = DefaultConfig.Cache.For
}
for pci := range c.Polls {
if c.Polls[pci].For.IsZero() {
c.Polls[pci].For = DefaultConfig.Cache.For
c.Polls[pci].Disable = true
}
}
if len(c.Polls) == 0 {
c.Polls = DefaultConfig.Polls
}
return c, nil return c, nil
} }
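Read together with the types above (Cache.For and Polls, each using a GlobMatcher), a site configuration exercising this code might look roughly like the following; the glob pattern and poll intervals are illustrative, and the exact key layout is inferred from the struct fields rather than quoted from documentation:

```toml
[httpcache]
  [httpcache.cache.for]
    includes = ["**gohugo.io**"]
  [[httpcache.polls]]
    low = "5s"
    high = "32s"
    [httpcache.polls.for]
      includes = ["**gohugo.io**"]
```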


@ -22,8 +22,6 @@ import (
) )
func TestConfigCustom(t *testing.T) { func TestConfigCustom(t *testing.T) {
t.Parallel()
files := ` files := `
-- hugo.toml -- -- hugo.toml --
[httpcache] [httpcache]
@ -53,8 +51,6 @@ includes = ["**gohugo.io**"]
} }
func TestConfigDefault(t *testing.T) { func TestConfigDefault(t *testing.T) {
t.Parallel()
files := ` files := `
-- hugo.toml -- -- hugo.toml --
` `
@ -66,30 +62,3 @@ func TestConfigDefault(t *testing.T) {
b.Assert(compiled.For("https://gohugo.io/foo.jpg"), qt.IsFalse) b.Assert(compiled.For("https://gohugo.io/foo.jpg"), qt.IsFalse)
b.Assert(compiled.PollConfigFor("https://gohugo.io/foo.jpg").Config.Disable, qt.IsTrue) b.Assert(compiled.PollConfigFor("https://gohugo.io/foo.jpg").Config.Disable, qt.IsTrue)
} }
func TestConfigPollsOnly(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
[httpcache]
[[httpcache.polls]]
low = "5s"
high = "32s"
[httpcache.polls.for]
includes = ["**gohugo.io**"]
`
b := hugolib.Test(t, files)
compiled := b.H.Configs.Base.C.HTTPCache
b.Assert(compiled.For("https://gohugo.io/posts.json"), qt.IsFalse)
b.Assert(compiled.For("https://gohugo.io/foo.jpg"), qt.IsFalse)
pc := compiled.PollConfigFor("https://gohugo.io/foo.jpg")
b.Assert(pc.Config.Low, qt.Equals, 5*time.Second)
b.Assert(pc.Config.High, qt.Equals, 32*time.Second)
b.Assert(compiled.PollConfigFor("https://example.com/foo.jpg").IsZero(), qt.IsTrue)
}


@ -17,7 +17,6 @@ import (
"testing" "testing"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/config"
) )
func TestGlobMatcher(t *testing.T) { func TestGlobMatcher(t *testing.T) {
@ -41,33 +40,3 @@ func TestGlobMatcher(t *testing.T) {
c.Assert(p("foo/bar/foo.css"), qt.IsFalse) c.Assert(p("foo/bar/foo.css"), qt.IsFalse)
c.Assert(p("foo/bar/foo.xml"), qt.IsTrue) c.Assert(p("foo/bar/foo.xml"), qt.IsTrue)
} }
func TestDefaultConfig(t *testing.T) {
c := qt.New(t)
_, err := DefaultConfig.Compile()
c.Assert(err, qt.IsNil)
}
func TestDecodeConfigInjectsDefaultAndCompiles(t *testing.T) {
c := qt.New(t)
cfg, err := DecodeConfig(config.BaseConfig{}, map[string]interface{}{})
c.Assert(err, qt.IsNil)
c.Assert(cfg, qt.DeepEquals, DefaultConfig)
_, err = cfg.Compile()
c.Assert(err, qt.IsNil)
cfg, err = DecodeConfig(config.BaseConfig{}, map[string]any{
"cache": map[string]any{
"polls": []map[string]any{
{"disable": true},
},
},
})
c.Assert(err, qt.IsNil)
_, err = cfg.Compile()
c.Assert(err, qt.IsNil)
}


@ -26,7 +26,6 @@ import (
"path/filepath" "path/filepath"
"reflect" "reflect"
"regexp" "regexp"
"slices"
"sort" "sort"
"strings" "strings"
"sync" "sync"
@ -103,7 +102,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
} }
for _, t := range include { for _, t := range include {
for i := range t.NumMethod() { for i := 0; i < t.NumMethod(); i++ {
m := t.Method(i) m := t.Method(i)
if excludes[m.Name] || seen[m.Name] { if excludes[m.Name] || seen[m.Name] {
@ -123,7 +122,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
method := Method{Owner: t, OwnerName: ownerName, Name: m.Name} method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
for i := range numIn { for i := 0; i < numIn; i++ {
in := m.Type.In(i) in := m.Type.In(i)
name, pkg := nameAndPackage(in) name, pkg := nameAndPackage(in)
@ -138,7 +137,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
numOut := m.Type.NumOut() numOut := m.Type.NumOut()
if numOut > 0 { if numOut > 0 {
for i := range numOut { for i := 0; i < numOut; i++ {
out := m.Type.Out(i) out := m.Type.Out(i)
name, pkg := nameAndPackage(out) name, pkg := nameAndPackage(out)
@ -305,7 +304,7 @@ func (m Method) inOutStr() string {
} }
args := make([]string, len(m.In)) args := make([]string, len(m.In))
for i := range args { for i := 0; i < len(args); i++ {
args[i] = fmt.Sprintf("arg%d", i) args[i] = fmt.Sprintf("arg%d", i)
} }
return "(" + strings.Join(args, ", ") + ")" return "(" + strings.Join(args, ", ") + ")"
@ -317,7 +316,7 @@ func (m Method) inStr() string {
} }
args := make([]string, len(m.In)) args := make([]string, len(m.In))
for i := range args { for i := 0; i < len(args); i++ {
args[i] = fmt.Sprintf("arg%d %s", i, m.In[i]) args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
} }
return "(" + strings.Join(args, ", ") + ")" return "(" + strings.Join(args, ", ") + ")"
@ -340,7 +339,7 @@ func (m Method) outStrNamed() string {
} }
outs := make([]string, len(m.Out)) outs := make([]string, len(m.Out))
for i := range outs { for i := 0; i < len(outs); i++ {
outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i]) outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i])
} }
@ -436,7 +435,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
// Exclude self // Exclude self
for i, pkgImp := range pkgImports { for i, pkgImp := range pkgImports {
if pkgImp == pkgPath { if pkgImp == pkgPath {
pkgImports = slices.Delete(pkgImports, i, i+1) pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
} }
} }
} }


@ -39,6 +39,7 @@ import (
"github.com/gohugoio/hugo/common/hstrings" "github.com/gohugoio/hugo/common/hstrings"
"github.com/gohugoio/hugo/common/htime" "github.com/gohugoio/hugo/common/htime"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
@ -101,10 +102,9 @@ type configKey struct {
// This is the root command. // This is the root command.
type rootCommand struct { type rootCommand struct {
Printf func(format string, v ...any) Printf func(format string, v ...interface{})
Println func(a ...any) Println func(a ...interface{})
StdOut io.Writer Out io.Writer
StdErr io.Writer
logger loggers.Logger logger loggers.Logger
@ -141,6 +141,8 @@ type rootCommand struct {
logLevel string logLevel string
verbose bool
debug bool
quiet bool quiet bool
devMode bool // Hidden flag. devMode bool // Hidden flag.
@ -357,7 +359,7 @@ func (r *rootCommand) getOrCreateHugo(cfg config.Provider, ignoreModuleDoesNotEx
} }
func (r *rootCommand) newDepsConfig(conf *commonConfig) deps.DepsCfg { func (r *rootCommand) newDepsConfig(conf *commonConfig) deps.DepsCfg {
return deps.DepsCfg{Configs: conf.configs, Fs: conf.fs, StdOut: r.logger.StdOut(), StdErr: r.logger.StdErr(), LogLevel: r.logger.Level(), ChangesFromBuild: r.changesFromBuild} return deps.DepsCfg{Configs: conf.configs, Fs: conf.fs, LogOut: r.logger.Out(), LogLevel: r.logger.Level(), ChangesFromBuild: r.changesFromBuild}
} }
func (r *rootCommand) Name() string { func (r *rootCommand) Name() string {
@ -422,23 +424,21 @@ func (r *rootCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args
} }
func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error { func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
r.StdOut = os.Stdout r.Out = os.Stdout
r.StdErr = os.Stderr
if r.quiet { if r.quiet {
r.StdOut = io.Discard r.Out = io.Discard
r.StdErr = io.Discard
} }
// Used by mkcert (server). // Used by mkcert (server).
log.SetOutput(r.StdOut) log.SetOutput(r.Out)
r.Printf = func(format string, v ...any) { r.Printf = func(format string, v ...interface{}) {
if !r.quiet { if !r.quiet {
fmt.Fprintf(r.StdOut, format, v...) fmt.Fprintf(r.Out, format, v...)
} }
} }
r.Println = func(a ...any) { r.Println = func(a ...interface{}) {
if !r.quiet { if !r.quiet {
fmt.Fprintln(r.StdOut, a...) fmt.Fprintln(r.Out, a...)
} }
} }
_, running := runner.Command.(*serverCommand) _, running := runner.Command.(*serverCommand)
@ -447,8 +447,6 @@ func (r *rootCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
if err != nil { if err != nil {
return err return err
} }
// Set up the global logger early to allow info deprecations during config load.
loggers.SetGlobalLogger(r.logger)
r.changesFromBuild = make(chan []identity.Identity, 10) r.changesFromBuild = make(chan []identity.Identity, 10)
@ -484,14 +482,25 @@ func (r *rootCommand) createLogger(running bool) (loggers.Logger, error) {
default: default:
return nil, fmt.Errorf("invalid log level: %q, must be one of debug, warn, info or error", r.logLevel) return nil, fmt.Errorf("invalid log level: %q, must be one of debug, warn, info or error", r.logLevel)
} }
} else {
if r.verbose {
hugo.Deprecate("--verbose", "use --logLevel info", "v0.114.0")
hugo.Deprecate("--verbose", "use --logLevel info", "v0.114.0")
level = logg.LevelInfo
}
if r.debug {
hugo.Deprecate("--debug", "use --logLevel debug", "v0.114.0")
level = logg.LevelDebug
}
} }
} }
optsLogger := loggers.Options{ optsLogger := loggers.Options{
DistinctLevel: logg.LevelWarn, DistinctLevel: logg.LevelWarn,
Level: level, Level: level,
StdOut: r.StdOut, Stdout: r.Out,
StdErr: r.StdErr, Stderr: r.Out,
StoreErrors: running, StoreErrors: running,
} }
@ -540,7 +549,6 @@ Complete documentation is available at https://gohugo.io/.`
cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory") cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory")
_ = cmd.MarkFlagDirname("themesDir") _ = cmd.MarkFlagDirname("themesDir")
cmd.PersistentFlags().StringP("ignoreVendorPaths", "", "", "ignores any _vendor for module paths matching the given Glob pattern") cmd.PersistentFlags().StringP("ignoreVendorPaths", "", "", "ignores any _vendor for module paths matching the given Glob pattern")
cmd.PersistentFlags().BoolP("noBuildLock", "", false, "don't create .hugo_build.lock file")
_ = cmd.RegisterFlagCompletionFunc("ignoreVendorPaths", cobra.NoFileCompletions) _ = cmd.RegisterFlagCompletionFunc("ignoreVendorPaths", cobra.NoFileCompletions)
cmd.PersistentFlags().String("clock", "", "set the clock used by Hugo, e.g. --clock 2021-11-06T22:30:00.00+09:00") cmd.PersistentFlags().String("clock", "", "set the clock used by Hugo, e.g. --clock 2021-11-06T22:30:00.00+09:00")
_ = cmd.RegisterFlagCompletionFunc("clock", cobra.NoFileCompletions) _ = cmd.RegisterFlagCompletionFunc("clock", cobra.NoFileCompletions)
@ -552,6 +560,8 @@ Complete documentation is available at https://gohugo.io/.`
cmd.PersistentFlags().BoolVar(&r.quiet, "quiet", false, "build in quiet mode") cmd.PersistentFlags().BoolVar(&r.quiet, "quiet", false, "build in quiet mode")
cmd.PersistentFlags().BoolVarP(&r.renderToMemory, "renderToMemory", "M", false, "render to memory (mostly useful when running the server)") cmd.PersistentFlags().BoolVarP(&r.renderToMemory, "renderToMemory", "M", false, "render to memory (mostly useful when running the server)")
cmd.PersistentFlags().BoolVarP(&r.verbose, "verbose", "v", false, "verbose output")
cmd.PersistentFlags().BoolVarP(&r.debug, "debug", "", false, "debug output")
cmd.PersistentFlags().BoolVarP(&r.devMode, "devMode", "", false, "only used for internal testing, flag hidden.") cmd.PersistentFlags().BoolVarP(&r.devMode, "devMode", "", false, "only used for internal testing, flag hidden.")
cmd.PersistentFlags().StringVar(&r.logLevel, "logLevel", "", "log level (debug|info|warn|error)") cmd.PersistentFlags().StringVar(&r.logLevel, "logLevel", "", "log level (debug|info|warn|error)")
_ = cmd.RegisterFlagCompletionFunc("logLevel", cobra.FixedCompletions([]string{"debug", "info", "warn", "error"}, cobra.ShellCompDirectiveNoFileComp)) _ = cmd.RegisterFlagCompletionFunc("logLevel", cobra.FixedCompletions([]string{"debug", "info", "warn", "error"}, cobra.ShellCompDirectiveNoFileComp))
@ -596,6 +606,7 @@ func applyLocalFlagsBuild(cmd *cobra.Command, r *rootCommand) {
cmd.Flags().BoolVar(&r.forceSyncStatic, "forceSyncStatic", false, "copy all files when static is changed.") cmd.Flags().BoolVar(&r.forceSyncStatic, "forceSyncStatic", false, "copy all files when static is changed.")
cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files") cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files") cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
cmd.Flags().BoolP("noBuildLock", "", false, "don't create .hugo_build.lock file")
cmd.Flags().BoolP("printI18nWarnings", "", false, "print missing translations") cmd.Flags().BoolP("printI18nWarnings", "", false, "print missing translations")
cmd.Flags().BoolP("printPathWarnings", "", false, "print warnings on duplicate target paths etc.") cmd.Flags().BoolP("printPathWarnings", "", false, "print warnings on duplicate target paths etc.")
cmd.Flags().BoolP("printUnusedTemplates", "", false, "print warnings on unused templates.") cmd.Flags().BoolP("printUnusedTemplates", "", false, "print warnings on unused templates.")


@ -45,7 +45,6 @@ type configCommand struct {
format string format string
lang string lang string
printZero bool
commands []simplecobra.Commander commands []simplecobra.Commander
} }
@ -79,7 +78,7 @@ func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, arg
dec.SetIndent("", " ") dec.SetIndent("", " ")
dec.SetEscapeHTML(false) dec.SetEscapeHTML(false)
if err := dec.Encode(parser.ReplacingJSONMarshaller{Value: config, KeysToLower: true, OmitEmpty: !c.printZero}); err != nil { if err := dec.Encode(parser.ReplacingJSONMarshaller{Value: config, KeysToLower: true, OmitEmpty: true}); err != nil {
return err return err
} }
@ -90,7 +89,7 @@ func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, arg
os.Stdout.Write(buf.Bytes()) os.Stdout.Write(buf.Bytes())
default: default:
// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format. // Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
var m map[string]any var m map[string]interface{}
if err := json.Unmarshal(buf.Bytes(), &m); err != nil { if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
return err return err
} }
@ -116,7 +115,6 @@ func (c *configCommand) Init(cd *simplecobra.Commandeer) error {
cmd.Flags().StringVar(&c.format, "format", "toml", "preferred file format (toml, yaml or json)") cmd.Flags().StringVar(&c.format, "format", "toml", "preferred file format (toml, yaml or json)")
_ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp)) _ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp))
cmd.Flags().StringVar(&c.lang, "lang", "", "the language to display config for. Defaults to the first language defined.") cmd.Flags().StringVar(&c.lang, "lang", "", "the language to display config for. Defaults to the first language defined.")
cmd.Flags().BoolVar(&c.printZero, "printZero", false, `include config options with zero values (e.g. false, 0, "") in the output`)
_ = cmd.RegisterFlagCompletionFunc("lang", cobra.NoFileCompletions) _ = cmd.RegisterFlagCompletionFunc("lang", cobra.NoFileCompletions)
applyLocalFlagsBuildConfig(cmd, c.r) applyLocalFlagsBuildConfig(cmd, c.r)


@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build withdeploy //go:build withdeploy
// +build withdeploy
package commands package commands


@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build !withdeploy //go:build !withdeploy
// +build !withdeploy
// Copyright 2024 The Hugo Authors. All rights reserved. // Copyright 2024 The Hugo Authors. All rights reserved.
// //


@ -21,7 +21,6 @@ import (
"os" "os"
"path" "path"
"path/filepath" "path/filepath"
"slices"
"strings" "strings"
"github.com/alecthomas/chroma/v2" "github.com/alecthomas/chroma/v2"
@ -50,7 +49,6 @@ func newGenCommand() *genCommand {
highlightStyle string highlightStyle string
lineNumbersInlineStyle string lineNumbersInlineStyle string
lineNumbersTableStyle string lineNumbersTableStyle string
omitEmpty bool
) )
newChromaStyles := func() simplecobra.Commander { newChromaStyles := func() simplecobra.Commander {
@ -62,10 +60,6 @@ func newGenCommand() *genCommand {
See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`, See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
style = strings.ToLower(style)
if !slices.Contains(styles.Names(), style) {
return fmt.Errorf("invalid style: %s", style)
}
builder := styles.Get(style).Builder() builder := styles.Get(style).Builder()
if highlightStyle != "" { if highlightStyle != "" {
builder.Add(chroma.LineHighlight, highlightStyle) builder.Add(chroma.LineHighlight, highlightStyle)
@ -80,17 +74,8 @@ See https://xyproto.github.io/splash/docs/all.html for a preview of the availabl
if err != nil { if err != nil {
return err return err
} }
formatter := html.New(html.WithAllClasses(true))
var formatter *html.Formatter formatter.WriteCSS(os.Stdout, style)
if omitEmpty {
formatter = html.New(html.WithClasses(true))
} else {
formatter = html.New(html.WithAllClasses(true))
}
w := os.Stdout
fmt.Fprintf(w, "/* Generated using: hugo %s */\n\n", strings.Join(os.Args[1:], " "))
formatter.WriteCSS(w, style)
return nil return nil
}, },
withc: func(cmd *cobra.Command, r *rootCommand) { withc: func(cmd *cobra.Command, r *rootCommand) {
@ -103,8 +88,6 @@ See https://xyproto.github.io/splash/docs/all.html for a preview of the availabl
_ = cmd.RegisterFlagCompletionFunc("lineNumbersInlineStyle", cobra.NoFileCompletions) _ = cmd.RegisterFlagCompletionFunc("lineNumbersInlineStyle", cobra.NoFileCompletions)
cmd.PersistentFlags().StringVar(&lineNumbersTableStyle, "lineNumbersTableStyle", "", `foreground and background colors for table line numbers, e.g. --lineNumbersTableStyle "#fff000 bg:#000fff"`) cmd.PersistentFlags().StringVar(&lineNumbersTableStyle, "lineNumbersTableStyle", "", `foreground and background colors for table line numbers, e.g. --lineNumbersTableStyle "#fff000 bg:#000fff"`)
_ = cmd.RegisterFlagCompletionFunc("lineNumbersTableStyle", cobra.NoFileCompletions) _ = cmd.RegisterFlagCompletionFunc("lineNumbersTableStyle", cobra.NoFileCompletions)
cmd.PersistentFlags().BoolVar(&omitEmpty, "omitEmpty", false, `omit empty CSS rules`)
_ = cmd.RegisterFlagCompletionFunc("omitEmpty", cobra.NoFileCompletions)
}, },
} }
} }
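For context, the generator above is normally invoked from the CLI; a sketch of typical usage, assuming the command's --style flag (the style name and output path are arbitrary, and --omitEmpty is the flag registered a few lines above, present on master only):

```text
hugo gen chromastyles --style=monokai --omitEmpty > assets/css/syntax.css
```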
@ -159,7 +142,7 @@ url: %s
return &simpleCommand{ return &simpleCommand{
name: "doc", name: "doc",
short: "Generate Markdown documentation for the Hugo CLI", short: "Generate Markdown documentation for the Hugo CLI.",
long: `Generate Markdown documentation for the Hugo CLI. long: `Generate Markdown documentation for the Hugo CLI.
This command is, mostly, used to create up-to-date documentation This command is, mostly, used to create up-to-date documentation
of Hugo's command-line interface for https://gohugo.io/. of Hugo's command-line interface for https://gohugo.io/.
@ -184,13 +167,13 @@ url: %s
prepender := func(filename string) string { prepender := func(filename string) string {
name := filepath.Base(filename) name := filepath.Base(filename)
base := strings.TrimSuffix(name, path.Ext(name)) base := strings.TrimSuffix(name, path.Ext(name))
url := "/docs/reference/commands/" + strings.ToLower(base) + "/" url := "/commands/" + strings.ToLower(base) + "/"
return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url) return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url)
} }
linkHandler := func(name string) string { linkHandler := func(name string) string {
base := strings.TrimSuffix(name, path.Ext(name)) base := strings.TrimSuffix(name, path.Ext(name))
return "/docs/reference/commands/" + strings.ToLower(base) + "/" return "/commands/" + strings.ToLower(base) + "/"
} }
r.Println("Generating Hugo command-line documentation in", gendocdir, "...") r.Println("Generating Hugo command-line documentation in", gendocdir, "...")
doc.GenMarkdownTreeCustom(cd.CobraCommand.Root(), gendocdir, prepender, linkHandler) doc.GenMarkdownTreeCustom(cd.CobraCommand.Root(), gendocdir, prepender, linkHandler)
@ -211,7 +194,7 @@ url: %s
newDocsHelper := func() simplecobra.Commander { newDocsHelper := func() simplecobra.Commander {
return &simpleCommand{ return &simpleCommand{
name: "docshelper", name: "docshelper",
short: "Generate some data files for the Hugo docs", short: "Generate some data files for the Hugo docs.",
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
r.Println("Generate docs data to", docsHelperTarget) r.Println("Generate docs data to", docsHelperTarget)
@ -232,7 +215,7 @@ url: %s
} }
// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format. // Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
var m map[string]any var m map[string]interface{}
if err := json.Unmarshal(buf.Bytes(), &m); err != nil { if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
return err return err
} }


@ -62,7 +62,7 @@ type hugoBuilder struct {
// Currently only set when in "fast render mode". // Currently only set when in "fast render mode".
changeDetector *fileChangeDetector changeDetector *fileChangeDetector
visitedURLs *types.EvictingQueue[string] visitedURLs *types.EvictingStringQueue
fullRebuildSem *semaphore.Weighted fullRebuildSem *semaphore.Weighted
debounce func(f func()) debounce func(f func())
@ -663,20 +663,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
var n int var n int
for _, ev := range evs { for _, ev := range evs {
keep := true keep := true
// Write and rename operations are often followed by CHMOD. if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) {
// There may be valid use cases for rebuilding the site on CHMOD,
// but that will require more complex logic than this simple conditional.
// On OS X this seems to be related to Spotlight, see:
// https://github.com/go-fsnotify/fsnotify/issues/15
// A workaround is to put your site(s) on the Spotlight exception list,
// but that may be a little mysterious for most end users.
// So, for now, we skip reload on CHMOD.
// We do have to check for WRITE though. On slower laptops a Chmod
// could be aggregated with other important events, and we still want
// to rebuild on those
if ev.Op == fsnotify.Chmod {
keep = false
} else if ev.Has(fsnotify.Create) || ev.Has(fsnotify.Write) {
if _, err := os.Stat(ev.Name); err != nil { if _, err := os.Stat(ev.Name); err != nil {
keep = false keep = false
} }
@ -818,6 +805,21 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
continue continue
} }
// Write and rename operations are often followed by CHMOD.
// There may be valid use cases for rebuilding the site on CHMOD,
// but that will require more complex logic than this simple conditional.
// On OS X this seems to be related to Spotlight, see:
// https://github.com/go-fsnotify/fsnotify/issues/15
// A workaround is to put your site(s) on the Spotlight exception list,
// but that may be a little mysterious for most end users.
// So, for now, we skip reload on CHMOD.
// We do have to check for WRITE though. On slower laptops a Chmod
// could be aggregated with other important events, and we still want
// to rebuild on those
if ev.Op&(fsnotify.Chmod|fsnotify.Write|fsnotify.Create) == fsnotify.Chmod {
continue
}
walkAdder := func(path string, f hugofs.FileMetaInfo) error { walkAdder := func(path string, f hugofs.FileMetaInfo) error {
if f.IsDir() { if f.IsDir() {
c.r.logger.Println("adding created directory to watchlist", path) c.r.logger.Println("adding created directory to watchlist", path)
@ -918,11 +920,7 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
changed := c.changeDetector.changed() changed := c.changeDetector.changed()
if c.changeDetector != nil { if c.changeDetector != nil {
if len(changed) >= 10 {
lrl.Logf("build changed %d files", len(changed)) lrl.Logf("build changed %d files", len(changed))
} else {
lrl.Logf("build changed %d files: %q", len(changed), changed)
}
if len(changed) == 0 { if len(changed) == 0 {
// Nothing has changed. // Nothing has changed.
return return
@ -968,13 +966,10 @@ func (c *hugoBuilder) handleEvents(watcher *watcher.Batcher,
pathToRefresh := h.PathSpec.RelURL(paths.ToSlashTrimLeading(otherChanges[0]), false) pathToRefresh := h.PathSpec.RelURL(paths.ToSlashTrimLeading(otherChanges[0]), false)
lrl.Logf("refreshing %q", pathToRefresh) lrl.Logf("refreshing %q", pathToRefresh)
livereload.RefreshPath(pathToRefresh) livereload.RefreshPath(pathToRefresh)
} else if len(cssChanges) == 0 || len(otherChanges) > 1 { } else if len(cssChanges) == 0 {
lrl.Logf("force refresh") lrl.Logf("force refresh")
livereload.ForceRefresh() livereload.ForceRefresh()
} }
} else {
lrl.Logf("force refresh")
livereload.ForceRefresh()
} }
if len(cssChanges) > 0 { if len(cssChanges) > 0 {
@ -1104,7 +1099,7 @@ func (c *hugoBuilder) rebuildSites(events []fsnotify.Event) (err error) {
if err != nil { if err != nil {
return return
} }
err = h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyTouched: c.visitedURLs, ErrRecovery: c.errState.wasErr()}, events...) err = h.Build(hugolib.BuildCfg{NoBuildLock: true, RecentlyVisited: c.visitedURLs, ErrRecovery: c.errState.wasErr()}, events...)
return return
} }
@ -1120,7 +1115,7 @@ func (c *hugoBuilder) rebuildSitesForChanges(ids []identity.Identity) (err error
} }
whatChanged := &hugolib.WhatChanged{} whatChanged := &hugolib.WhatChanged{}
whatChanged.Add(ids...) whatChanged.Add(ids...)
err = h.Build(hugolib.BuildCfg{NoBuildLock: true, WhatChanged: whatChanged, RecentlyTouched: c.visitedURLs, ErrRecovery: c.errState.wasErr()}) err = h.Build(hugolib.BuildCfg{NoBuildLock: true, WhatChanged: whatChanged, RecentlyVisited: c.visitedURLs, ErrRecovery: c.errState.wasErr()})
return return
} }

View file

@ -57,7 +57,7 @@ func newListCommand() *listCommand {
return err return err
} }
writer := csv.NewWriter(r.StdOut) writer := csv.NewWriter(r.Out)
defer writer.Flush() defer writer.Flush()
writer.Write([]string{ writer.Write([]string{

View file

@ -44,12 +44,12 @@ func newModCommands() *modCommands {
npmCommand := &simpleCommand{ npmCommand := &simpleCommand{
name: "npm", name: "npm",
short: "Various npm helpers", short: "Various npm helpers.",
long: `Various npm (Node package manager) helpers.`, long: `Various npm (Node package manager) helpers.`,
commands: []simplecobra.Commander{ commands: []simplecobra.Commander{
&simpleCommand{ &simpleCommand{
name: "pack", name: "pack",
short: "Experimental: Prepares and writes a composite package.json file for your project", short: "Experimental: Prepares and writes a composite package.json file for your project.",
long: `Prepares and writes a composite package.json file for your project. long: `Prepares and writes a composite package.json file for your project.
On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file
@ -80,7 +80,7 @@ so this may/will change in future versions of Hugo.
commands: []simplecobra.Commander{ commands: []simplecobra.Commander{
&simpleCommand{ &simpleCommand{
name: "init", name: "init",
short: "Initialize this project as a Hugo Module", short: "Initialize this project as a Hugo Module.",
long: `Initialize this project as a Hugo Module. long: `Initialize this project as a Hugo Module.
It will try to guess the module path, but you may help by passing it as an argument, e.g: It will try to guess the module path, but you may help by passing it as an argument, e.g:
@ -111,7 +111,7 @@ so this may/will change in future versions of Hugo.
}, },
&simpleCommand{ &simpleCommand{
name: "verify", name: "verify",
short: "Verify dependencies", short: "Verify dependencies.",
long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.`, long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.`,
withc: func(cmd *cobra.Command, r *rootCommand) { withc: func(cmd *cobra.Command, r *rootCommand) {
cmd.ValidArgsFunction = cobra.NoFileCompletions cmd.ValidArgsFunction = cobra.NoFileCompletions
@ -129,7 +129,7 @@ so this may/will change in future versions of Hugo.
}, },
&simpleCommand{ &simpleCommand{
name: "graph", name: "graph",
short: "Print a module dependency graph", short: "Print a module dependency graph.",
long: `Print a module dependency graph with information about module status (disabled, vendored). long: `Print a module dependency graph with information about module status (disabled, vendored).
Note that for vendored modules, that is the version listed and not the one from go.mod. Note that for vendored modules, that is the version listed and not the one from go.mod.
`, `,
@ -149,7 +149,7 @@ Note that for vendored modules, that is the version listed and not the one from
}, },
&simpleCommand{ &simpleCommand{
name: "clean", name: "clean",
short: "Delete the Hugo Module cache for the current project", short: "Delete the Hugo Module cache for the current project.",
long: `Delete the Hugo Module cache for the current project.`, long: `Delete the Hugo Module cache for the current project.`,
withc: func(cmd *cobra.Command, r *rootCommand) { withc: func(cmd *cobra.Command, r *rootCommand) {
cmd.ValidArgsFunction = cobra.NoFileCompletions cmd.ValidArgsFunction = cobra.NoFileCompletions
@ -175,7 +175,7 @@ Note that for vendored modules, that is the version listed and not the one from
}, },
&simpleCommand{ &simpleCommand{
name: "tidy", name: "tidy",
short: "Remove unused entries in go.mod and go.sum", short: "Remove unused entries in go.mod and go.sum.",
withc: func(cmd *cobra.Command, r *rootCommand) { withc: func(cmd *cobra.Command, r *rootCommand) {
cmd.ValidArgsFunction = cobra.NoFileCompletions cmd.ValidArgsFunction = cobra.NoFileCompletions
applyLocalFlagsBuildConfig(cmd, r) applyLocalFlagsBuildConfig(cmd, r)
@ -190,7 +190,7 @@ Note that for vendored modules, that is the version listed and not the one from
}, },
&simpleCommand{ &simpleCommand{
name: "vendor", name: "vendor",
short: "Vendor all module dependencies into the _vendor directory", short: "Vendor all module dependencies into the _vendor directory.",
long: `Vendor all module dependencies into the _vendor directory. long: `Vendor all module dependencies into the _vendor directory.
If a module is vendored, that is where Hugo will look for its dependencies. If a module is vendored, that is where Hugo will look for its dependencies.
`, `,
@ -209,9 +209,9 @@ Note that for vendored modules, that is the version listed and not the one from
&simpleCommand{ &simpleCommand{
name: "get", name: "get",
short: "Resolves dependencies in your current Hugo project", short: "Resolves dependencies in your current Hugo Project.",
long: ` long: `
Resolves dependencies in your current Hugo project. Resolves dependencies in your current Hugo Project.
Some examples: Some examples:

View file

@ -76,8 +76,10 @@ Ensure you run this within the root directory of your site.`,
&simpleCommand{ &simpleCommand{
name: "site", name: "site",
use: "site [path]", use: "site [path]",
short: "Create a new site", short: "Create a new site (skeleton)",
long: `Create a new site at the specified path.`, long: `Create a new site in the provided directory.
The new site will have the correct structure, but no content or theme yet.
Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
if len(args) < 1 { if len(args) < 1 {
return newUserError("path needs to be provided") return newUserError("path needs to be provided")
@ -122,9 +124,11 @@ Ensure you run this within the root directory of your site.`,
&simpleCommand{ &simpleCommand{
name: "theme", name: "theme",
use: "theme [name]", use: "theme [name]",
short: "Create a new theme", short: "Create a new theme (skeleton)",
long: `Create a new theme with the specified name in the ./themes directory. long: `Create a new theme (skeleton) called [name] in ./themes.
This generates a functional theme including template examples and sample content.`, New theme is a skeleton. Please add content to the touched files. Add your
name to the copyright line in the license and adjust the theme.toml file
according to your needs.`,
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
if len(args) < 1 { if len(args) < 1 {
return newUserError("theme name needs to be provided") return newUserError("theme name needs to be provided")
@ -140,7 +144,7 @@ This generates a functional theme including template examples and sample content
createpath := paths.AbsPathify(conf.configs.Base.WorkingDir, filepath.Join(conf.configs.Base.ThemesDir, args[0])) createpath := paths.AbsPathify(conf.configs.Base.WorkingDir, filepath.Join(conf.configs.Base.ThemesDir, args[0]))
r.Println("Creating new theme in", createpath) r.Println("Creating new theme in", createpath)
err = skeletons.CreateTheme(createpath, sourceFs, format) err = skeletons.CreateTheme(createpath, sourceFs)
if err != nil { if err != nil {
return err return err
} }
@ -148,14 +152,7 @@ This generates a functional theme including template examples and sample content
return nil return nil
}, },
withc: func(cmd *cobra.Command, r *rootCommand) { withc: func(cmd *cobra.Command, r *rootCommand) {
cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) { cmd.ValidArgsFunction = cobra.NoFileCompletions
if len(args) != 0 {
return []string{}, cobra.ShellCompDirectiveNoFileComp
}
return []string{}, cobra.ShellCompDirectiveNoFileComp | cobra.ShellCompDirectiveFilterDirs
}
cmd.Flags().StringVar(&format, "format", "toml", "preferred file format (toml, yaml or json)")
_ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp))
}, },
}, },
}, },

View file

@ -32,7 +32,7 @@ func newReleaseCommand() simplecobra.Commander {
return &simpleCommand{ return &simpleCommand{
name: "release", name: "release",
short: "Release a new version of Hugo", short: "Release a new version of Hugo.",
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
rel, err := releaser.New(skipPush, try, step) rel, err := releaser.New(skipPush, try, step)
if err != nil { if err != nil {

View file

@ -23,7 +23,6 @@ import (
"errors" "errors"
"fmt" "fmt"
"io" "io"
"maps"
"net" "net"
"net/http" "net/http"
_ "net/http/pprof" _ "net/http/pprof"
@ -33,7 +32,6 @@ import (
"path" "path"
"path/filepath" "path/filepath"
"regexp" "regexp"
"sort"
"strconv" "strconv"
"strings" "strings"
"sync" "sync"
@ -42,14 +40,12 @@ import (
"time" "time"
"github.com/bep/mclib" "github.com/bep/mclib"
"github.com/pkg/browser"
"github.com/bep/debounce" "github.com/bep/debounce"
"github.com/bep/simplecobra" "github.com/bep/simplecobra"
"github.com/fsnotify/fsnotify" "github.com/fsnotify/fsnotify"
"github.com/gohugoio/hugo/common/herrors" "github.com/gohugoio/hugo/common/herrors"
"github.com/gohugoio/hugo/common/hugo" "github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/tpl/tplimpl"
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/common/urls" "github.com/gohugoio/hugo/common/urls"
@ -59,6 +55,7 @@ import (
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/hugolib/filesystems" "github.com/gohugoio/hugo/hugolib/filesystems"
"github.com/gohugoio/hugo/livereload" "github.com/gohugoio/hugo/livereload"
"github.com/gohugoio/hugo/tpl"
"github.com/gohugoio/hugo/transform" "github.com/gohugoio/hugo/transform"
"github.com/gohugoio/hugo/transform/livereloadinject" "github.com/gohugoio/hugo/transform/livereloadinject"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -85,14 +82,10 @@ const (
configChangeGoWork = "go work file" configChangeGoWork = "go work file"
) )
const (
hugoHeaderRedirect = "X-Hugo-Redirect"
)
func newHugoBuilder(r *rootCommand, s *serverCommand, onConfigLoaded ...func(reloaded bool) error) *hugoBuilder { func newHugoBuilder(r *rootCommand, s *serverCommand, onConfigLoaded ...func(reloaded bool) error) *hugoBuilder {
var visitedURLs *types.EvictingQueue[string] var visitedURLs *types.EvictingStringQueue
if s != nil && !s.disableFastRender { if s != nil && !s.disableFastRender {
visitedURLs = types.NewEvictingQueue[string](20) visitedURLs = types.NewEvictingStringQueue(20)
} }
return &hugoBuilder{ return &hugoBuilder{
r: r, r: r,
@ -120,7 +113,7 @@ func newServerCommand() *serverCommand {
commands: []simplecobra.Commander{ commands: []simplecobra.Commander{
&simpleCommand{ &simpleCommand{
name: "trust", name: "trust",
short: "Install the local CA in the system trust store", short: "Install the local CA in the system trust store.",
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error { run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
action := "-install" action := "-install"
if uninstall { if uninstall {
@ -196,7 +189,9 @@ func (f *fileChangeDetector) PrepareNew() {
} }
f.prev = make(map[string]uint64) f.prev = make(map[string]uint64)
maps.Copy(f.prev, f.current) for k, v := range f.current {
f.prev[k] = v
}
f.current = make(map[string]uint64) f.current = make(map[string]uint64)
} }
@ -214,17 +209,16 @@ func (f *fileChangeDetector) changed() []string {
} }
} }
return f.filterIrrelevantAndSort(c) return f.filterIrrelevant(c)
} }
func (f *fileChangeDetector) filterIrrelevantAndSort(in []string) []string { func (f *fileChangeDetector) filterIrrelevant(in []string) []string {
var filtered []string var filtered []string
for _, v := range in { for _, v := range in {
if !f.irrelevantRe.MatchString(v) { if !f.irrelevantRe.MatchString(v) {
filtered = append(filtered, v) filtered = append(filtered, v)
} }
} }
sort.Strings(filtered)
return filtered return filtered
} }
@ -310,8 +304,8 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
w.Header().Set(header.Key, header.Value) w.Header().Set(header.Key, header.Value)
} }
if canRedirect(requestURI, r) { if redirect := serverConfig.MatchRedirect(requestURI); !redirect.IsZero() {
if redirect := serverConfig.MatchRedirect(requestURI, r.Header); !redirect.IsZero() { // fullName := filepath.Join(dir, filepath.FromSlash(path.Clean("/"+name)))
doRedirect := true doRedirect := true
// This matches Netlify's behavior and is needed for SPA behavior. // This matches Netlify's behavior and is needed for SPA behavior.
// See https://docs.netlify.com/routing/redirects/rewrites-proxies/ // See https://docs.netlify.com/routing/redirects/rewrites-proxies/
@ -340,7 +334,6 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
} }
if doRedirect { if doRedirect {
w.Header().Set(hugoHeaderRedirect, "true")
switch redirect.Status { switch redirect.Status {
case 404: case 404:
w.WriteHeader(404) w.WriteHeader(404)
@ -364,11 +357,11 @@ func (f *fileServer) createEndpoint(i int) (*http.ServeMux, net.Listener, string
} }
} }
}
} }
if f.c.fastRenderMode && f.c.errState.buildErr() == nil { if f.c.fastRenderMode && f.c.errState.buildErr() == nil {
if isNavigation(requestURI, r) { if strings.HasSuffix(requestURI, "/") || strings.HasSuffix(requestURI, "html") || strings.HasSuffix(requestURI, "htm") {
if !f.c.visitedURLs.Contains(requestURI) { if !f.c.visitedURLs.Contains(requestURI) {
// If not already on stack, re-render that single page. // If not already on stack, re-render that single page.
if err := f.c.partialReRender(requestURI); err != nil { if err := f.c.partialReRender(requestURI); err != nil {
@ -455,7 +448,6 @@ type serverCommand struct {
// Flags. // Flags.
renderStaticToDisk bool renderStaticToDisk bool
navigateToChanged bool navigateToChanged bool
openBrowser bool
serverAppend bool serverAppend bool
serverInterface string serverInterface string
tlsCertFile string tlsCertFile string
@ -547,7 +539,6 @@ of a second, you will be able to save and see your changes nearly instantly.`
cmd.Flags().BoolVarP(&c.serverAppend, "appendPort", "", true, "append port to baseURL") cmd.Flags().BoolVarP(&c.serverAppend, "appendPort", "", true, "append port to baseURL")
cmd.Flags().BoolVar(&c.disableLiveReload, "disableLiveReload", false, "watch without enabling live browser reload on rebuild") cmd.Flags().BoolVar(&c.disableLiveReload, "disableLiveReload", false, "watch without enabling live browser reload on rebuild")
cmd.Flags().BoolVarP(&c.navigateToChanged, "navigateToChanged", "N", false, "navigate to changed content file on live browser reload") cmd.Flags().BoolVarP(&c.navigateToChanged, "navigateToChanged", "N", false, "navigate to changed content file on live browser reload")
cmd.Flags().BoolVarP(&c.openBrowser, "openBrowser", "O", false, "open the site in a browser after server startup")
cmd.Flags().BoolVar(&c.renderStaticToDisk, "renderStaticToDisk", false, "serve static files from disk and dynamic files from memory") cmd.Flags().BoolVar(&c.renderStaticToDisk, "renderStaticToDisk", false, "serve static files from disk and dynamic files from memory")
cmd.Flags().BoolVar(&c.disableFastRender, "disableFastRender", false, "enables full re-renders on changes") cmd.Flags().BoolVar(&c.disableFastRender, "disableFastRender", false, "enables full re-renders on changes")
cmd.Flags().BoolVar(&c.disableBrowserError, "disableBrowserError", false, "do not show build errors in the browser") cmd.Flags().BoolVar(&c.disableBrowserError, "disableBrowserError", false, "do not show build errors in the browser")
@ -627,7 +618,7 @@ func (c *serverCommand) setServerInfoInConfig() error {
panic("no server ports set") panic("no server ports set")
} }
return c.withConfE(func(conf *commonConfig) error { return c.withConfE(func(conf *commonConfig) error {
for i, language := range conf.configs.LanguagesDefaultFirst { for i, language := range conf.configs.Languages {
isMultihost := conf.configs.IsMultihost isMultihost := conf.configs.IsMultihost
var serverPort int var serverPort int
if isMultihost { if isMultihost {
@ -758,7 +749,7 @@ func (c *serverCommand) createServerPorts(cd *simplecobra.Commandeer) error {
c.serverPorts = make([]serverPortListener, len(conf.configs.Languages)) c.serverPorts = make([]serverPortListener, len(conf.configs.Languages))
} }
currentServerPort := c.serverPort currentServerPort := c.serverPort
for i := range c.serverPorts { for i := 0; i < len(c.serverPorts); i++ {
l, err := net.Listen("tcp", net.JoinHostPort(c.serverInterface, strconv.Itoa(currentServerPort))) l, err := net.Listen("tcp", net.JoinHostPort(c.serverInterface, strconv.Itoa(currentServerPort)))
if err == nil { if err == nil {
c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort} c.serverPorts[i] = serverPortListener{ln: l, p: currentServerPort}
@ -842,7 +833,7 @@ func (c *serverCommand) partialReRender(urls ...string) (err error) {
defer func() { defer func() {
c.errState.setWasErr(false) c.errState.setWasErr(false)
}() }()
visited := types.NewEvictingQueue[string](len(urls)) visited := types.NewEvictingStringQueue(len(urls))
for _, url := range urls { for _, url := range urls {
visited.Add(url) visited.Add(url)
} }
@ -854,7 +845,7 @@ func (c *serverCommand) partialReRender(urls ...string) (err error) {
} }
// Note: We do not set NoBuildLock as the file lock is not acquired at this stage. // Note: We do not set NoBuildLock as the file lock is not acquired at this stage.
err = h.Build(hugolib.BuildCfg{NoBuildLock: false, RecentlyTouched: visited, PartialReRender: true, ErrRecovery: c.errState.wasErr()}) err = h.Build(hugolib.BuildCfg{NoBuildLock: false, RecentlyVisited: visited, PartialReRender: true, ErrRecovery: c.errState.wasErr()})
return return
} }
@ -897,16 +888,16 @@ func (c *serverCommand) serve() error {
// To allow the end user to change the error template while the server is running, we use // the freshest template we can provide.
// the freshest template we can provide. // the freshest template we can provide.
var ( var (
errTempl *tplimpl.TemplInfo errTempl tpl.Template
templHandler *tplimpl.TemplateStore templHandler tpl.TemplateHandler
) )
getErrorTemplateAndHandler := func(h *hugolib.HugoSites) (*tplimpl.TemplInfo, *tplimpl.TemplateStore) { getErrorTemplateAndHandler := func(h *hugolib.HugoSites) (tpl.Template, tpl.TemplateHandler) {
if h == nil { if h == nil {
return errTempl, templHandler return errTempl, templHandler
} }
templHandler := h.GetTemplateStore() templHandler := h.Tmpl()
errTempl := templHandler.LookupByPath("/_server/error.html") errTempl, found := templHandler.Lookup("_server/error.html")
if errTempl == nil { if !found {
panic("template server/error.html not found") panic("template server/error.html not found")
} }
return errTempl, templHandler return errTempl, templHandler
@ -1007,13 +998,6 @@ func (c *serverCommand) serve() error {
c.r.Println("Press Ctrl+C to stop") c.r.Println("Press Ctrl+C to stop")
if c.openBrowser {
// There may be more than one baseURL in multihost mode, open the first.
if err := browser.OpenURL(baseURLs[0].String()); err != nil {
c.r.logger.Warnf("Failed to open browser: %s", err)
}
}
err = func() error { err = func() error {
for { for {
select { select {
@ -1234,24 +1218,3 @@ func formatByteCount(b uint64) string {
return fmt.Sprintf("%.1f %cB", return fmt.Sprintf("%.1f %cB",
float64(b)/float64(div), "kMGTPE"[exp]) float64(b)/float64(div), "kMGTPE"[exp])
} }
func canRedirect(requestURIWithoutQuery string, r *http.Request) bool {
if r.Header.Get(hugoHeaderRedirect) != "" {
return false
}
return isNavigation(requestURIWithoutQuery, r)
}
// Sec-Fetch-Mode should be sent by all recent browser versions, see https://developer.mozilla.org/en-US/docs/Web/HTTP/Headers/Sec-Fetch-Mode#navigate
// Fall back to the file extension if not set.
// The main take here is that we don't want to have CSS/JS files etc. partake in this logic.
func isNavigation(requestURIWithoutQuery string, r *http.Request) bool {
return r.Header.Get("Sec-Fetch-Mode") == "navigate" || isPropablyHTMLRequest(requestURIWithoutQuery)
}
func isPropablyHTMLRequest(requestURIWithoutQuery string) bool {
if strings.HasSuffix(requestURIWithoutQuery, "/") || strings.HasSuffix(requestURIWithoutQuery, "html") || strings.HasSuffix(requestURIWithoutQuery, "htm") {
return true
}
return !strings.Contains(requestURIWithoutQuery, ".")
}
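The two helpers above gate both redirect handling and the fast-render re-render on whether a request looks like a page navigation rather than an asset fetch. A hedged, standalone restatement of that check (looksLikeNavigation is an illustrative name, not Hugo's):

package main

import (
	"fmt"
	"net/http"
	"strings"
)

// looksLikeNavigation trusts Sec-Fetch-Mode when the browser sends it and
// otherwise falls back to the URL shape, so CSS/JS fetches stay excluded.
func looksLikeNavigation(uri string, r *http.Request) bool {
	if r.Header.Get("Sec-Fetch-Mode") == "navigate" {
		return true
	}
	if strings.HasSuffix(uri, "/") || strings.HasSuffix(uri, "html") || strings.HasSuffix(uri, "htm") {
		return true
	}
	return !strings.Contains(uri, ".")
}

func main() {
	r, _ := http.NewRequest("GET", "http://localhost:1313/css/main.css", nil)
	fmt.Println(looksLikeNavigation("/css/main.css", r)) // false: asset fetch
	r.Header.Set("Sec-Fetch-Mode", "navigate")
	fmt.Println(looksLikeNavigation("/docs/", r)) // true: page navigation
}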

View file

@ -117,7 +117,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
tos = append(tos, nil) tos = append(tos, nil)
continue continue
} }
for i := range slice.Len() { for i := 0; i < slice.Len(); i++ {
tos = append(tos, slice.Index(i).Interface()) tos = append(tos, slice.Index(i).Interface())
} }
} }
@ -128,7 +128,7 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) { func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) {
var tos []any var tos []any
for i := range tov.Len() { for i := 0; i < tov.Len(); i++ {
tos = append(tos, tov.Index(i).Interface()) tos = append(tos, tov.Index(i).Interface())
} }

View file

@ -15,7 +15,6 @@ package collections
import ( import (
"html/template" "html/template"
"reflect"
"testing" "testing"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
@ -78,7 +77,6 @@ func TestAppend(t *testing.T) {
{[]string{"a", "b"}, []any{nil}, []any{"a", "b", nil}}, {[]string{"a", "b"}, []any{nil}, []any{"a", "b", nil}},
{[]string{"a", "b"}, []any{nil, "d", nil}, []any{"a", "b", nil, "d", nil}}, {[]string{"a", "b"}, []any{nil, "d", nil}, []any{"a", "b", nil, "d", nil}},
{[]any{"a", nil, "c"}, []any{"d", nil, "f"}, []any{"a", nil, "c", "d", nil, "f"}}, {[]any{"a", nil, "c"}, []any{"d", nil, "f"}, []any{"a", nil, "c", "d", nil, "f"}},
{[]string{"a", "b"}, []any{}, []string{"a", "b"}},
} { } {
result, err := Append(test.start, test.addend...) result, err := Append(test.start, test.addend...)
@ -148,66 +146,3 @@ func TestAppendShouldMakeACopyOfTheInputSlice(t *testing.T) {
c.Assert(result, qt.DeepEquals, []string{"a", "b", "c"}) c.Assert(result, qt.DeepEquals, []string{"a", "b", "c"})
c.Assert(slice, qt.DeepEquals, []string{"d", "b"}) c.Assert(slice, qt.DeepEquals, []string{"d", "b"})
} }
func TestIndirect(t *testing.T) {
t.Parallel()
c := qt.New(t)
type testStruct struct {
Field string
}
var (
nilPtr *testStruct
nilIface interface{} = nil
nonNilIface interface{} = &testStruct{Field: "hello"}
)
tests := []struct {
name string
input any
wantKind reflect.Kind
wantNil bool
}{
{
name: "nil pointer",
input: nilPtr,
wantKind: reflect.Ptr,
wantNil: true,
},
{
name: "nil interface",
input: nilIface,
wantKind: reflect.Invalid,
wantNil: false,
},
{
name: "non-nil pointer to struct",
input: &testStruct{Field: "abc"},
wantKind: reflect.Struct,
wantNil: false,
},
{
name: "non-nil interface holding pointer",
input: nonNilIface,
wantKind: reflect.Struct,
wantNil: false,
},
{
name: "plain value",
input: testStruct{Field: "xyz"},
wantKind: reflect.Struct,
wantNil: false,
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
v := reflect.ValueOf(tt.input)
got, isNil := indirect(v)
c.Assert(got.Kind(), qt.Equals, tt.wantKind)
c.Assert(isNil, qt.Equals, tt.wantNil)
})
}
}

View file

@ -136,37 +136,3 @@ func TestSortedStringSlice(t *testing.T) {
c.Assert(s.Count("z"), qt.Equals, 0) c.Assert(s.Count("z"), qt.Equals, 0)
c.Assert(s.Count("a"), qt.Equals, 1) c.Assert(s.Count("a"), qt.Equals, 1)
} }
func TestStringSliceToInterfaceSlice(t *testing.T) {
t.Parallel()
c := qt.New(t)
tests := []struct {
name string
in []string
want []any
}{
{
name: "empty slice",
in: []string{},
want: []any{},
},
{
name: "single element",
in: []string{"hello"},
want: []any{"hello"},
},
{
name: "multiple elements",
in: []string{"a", "b", "c"},
want: []any{"a", "b", "c"},
},
}
for _, tt := range tests {
t.Run(tt.name, func(t *testing.T) {
got := StringSliceToInterfaceSlice(tt.in)
c.Assert(got, qt.DeepEquals, tt.want)
})
}
}

View file

@ -13,8 +13,6 @@
package collections package collections
import "slices"
import "sync" import "sync"
// Stack is a simple LIFO stack that is safe for concurrent use. // Stack is a simple LIFO stack that is safe for concurrent use.
@ -75,7 +73,7 @@ func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T {
for i := len(s.items) - 1; i >= 0; i-- { for i := len(s.items) - 1; i >= 0; i-- {
if predicate(s.items[i]) { if predicate(s.items[i]) {
items = append(items, s.items[i]) items = append(items, s.items[i])
s.items = slices.Delete(s.items, i, i+1) s.items = append(s.items[:i], s.items[i+1:]...)
} }
} }
return items return items

View file

@ -1,77 +0,0 @@
package collections
import (
"testing"
qt "github.com/frankban/quicktest"
)
func TestNewStack(t *testing.T) {
t.Parallel()
c := qt.New(t)
s := NewStack[int]()
c.Assert(s, qt.IsNotNil)
}
func TestStackBasic(t *testing.T) {
t.Parallel()
c := qt.New(t)
s := NewStack[int]()
c.Assert(s.Len(), qt.Equals, 0)
s.Push(1)
s.Push(2)
s.Push(3)
c.Assert(s.Len(), qt.Equals, 3)
top, ok := s.Peek()
c.Assert(ok, qt.Equals, true)
c.Assert(top, qt.Equals, 3)
popped, ok := s.Pop()
c.Assert(ok, qt.Equals, true)
c.Assert(popped, qt.Equals, 3)
c.Assert(s.Len(), qt.Equals, 2)
_, _ = s.Pop()
_, _ = s.Pop()
_, ok = s.Pop()
c.Assert(ok, qt.Equals, false)
}
func TestStackDrain(t *testing.T) {
t.Parallel()
c := qt.New(t)
s := NewStack[string]()
s.Push("a")
s.Push("b")
got := s.Drain()
c.Assert(got, qt.DeepEquals, []string{"a", "b"})
c.Assert(s.Len(), qt.Equals, 0)
}
func TestStackDrainMatching(t *testing.T) {
t.Parallel()
c := qt.New(t)
s := NewStack[int]()
s.Push(1)
s.Push(2)
s.Push(3)
s.Push(4)
got := s.DrainMatching(func(v int) bool { return v%2 == 0 })
c.Assert(got, qt.DeepEquals, []int{4, 2})
c.Assert(s.Drain(), qt.DeepEquals, []int{1, 3})
}

View file

@ -23,8 +23,6 @@ const (
WarnFrontMatterParamsOverrides = "warning-frontmatter-params-overrides" WarnFrontMatterParamsOverrides = "warning-frontmatter-params-overrides"
WarnRenderShortcodesInHTML = "warning-rendershortcodes-in-html" WarnRenderShortcodesInHTML = "warning-rendershortcodes-in-html"
WarnGoldmarkRawHTML = "warning-goldmark-raw-html" WarnGoldmarkRawHTML = "warning-goldmark-raw-html"
WarnPartialSuperfluousPrefix = "warning-partial-superfluous-prefix"
WarnHomePageIsLeafBundle = "warning-home-page-is-leaf-bundle"
) )
// Field/method names with special meaning. // Field/method names with special meaning.
@ -43,7 +41,7 @@ const (
ResourceTransformationFingerprint = "fingerprint" ResourceTransformationFingerprint = "fingerprint"
) )
// IsResourceTransformationPermalinkHash returns whether the given name is a resource transformation that changes the permalink based on the content. // IsResourceTransformationLinkChange returns whether the given name is a resource transformation that changes the permalink based on the content.
func IsResourceTransformationPermalinkHash(name string) bool { func IsResourceTransformationPermalinkHash(name string) bool {
return name == ResourceTransformationFingerprint return name == ResourceTransformationFingerprint
} }

View file

@ -38,19 +38,6 @@ func XXHashFromReader(r io.Reader) (uint64, int64, error) {
return h.Sum64(), size, nil return h.Sum64(), size, nil
} }
// XxHashFromReaderHexEncoded calculates the xxHash for the given reader
// and returns the hash as a hex encoded string.
func XxHashFromReaderHexEncoded(r io.Reader) (string, error) {
h := getXxHashReadFrom()
defer putXxHashReadFrom(h)
_, err := io.Copy(h, r)
if err != nil {
return "", err
}
hash := h.Sum(nil)
return hex.EncodeToString(hash), nil
}
// XXHashFromString calculates the xxHash for the given string. // XXHashFromString calculates the xxHash for the given string.
func XXHashFromString(s string) (uint64, error) { func XXHashFromString(s string) (uint64, error) {
h := xxhash.New() h := xxhash.New()
@ -83,13 +70,6 @@ func HashString(vs ...any) string {
return strconv.FormatUint(hash, 10) return strconv.FormatUint(hash, 10)
} }
// HashStringHex returns a hash from the given elements as a hex encoded string.
// See HashString for more information.
func HashStringHex(vs ...any) string {
hash := HashUint64(vs...)
return strconv.FormatUint(hash, 16)
}
var hashOptsPool = sync.Pool{ var hashOptsPool = sync.Pool{
New: func() any { New: func() any {
return &hashstructure.HashOptions{ return &hashstructure.HashOptions{
@ -123,24 +103,16 @@ func HashUint64(vs ...any) uint64 {
o = elements o = elements
} }
hash, err := Hash(o) hashOpts := getHashOpts()
defer putHashOpts(hashOpts)
hash, err := hashstructure.Hash(o, hashOpts)
if err != nil { if err != nil {
panic(err) panic(err)
} }
return hash return hash
} }
// Hash returns a hash from vs.
func Hash(vs ...any) (uint64, error) {
hashOpts := getHashOpts()
defer putHashOpts(hashOpts)
var v any = vs
if len(vs) == 1 {
v = vs[0]
}
return hashstructure.Hash(v, hashOpts)
}
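HashString and the new HashStringHex produce the same non-cryptographic hash of their arguments, formatted base-10 and base-16 respectively, which is useful for cache keys and fingerprints. A small usage sketch, assuming the package is imported from github.com/gohugoio/hugo/common/hashing:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/hashing"
)

func main() {
	// Same elements, same underlying hash; only the string encoding differs.
	fmt.Println(hashing.HashString("layouts/index.html", 42))    // decimal digits
	fmt.Println(hashing.HashStringHex("layouts/index.html", 42)) // hex digits
}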
type keyer interface { type keyer interface {
Key() string Key() string
} }

View file

@ -37,12 +37,12 @@ func TestXxHashFromReaderPara(t *testing.T) {
c := qt.New(t) c := qt.New(t)
var wg sync.WaitGroup var wg sync.WaitGroup
for i := range 10 { for i := 0; i < 10; i++ {
i := i i := i
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()
for j := range 100 { for j := 0; j < 100; j++ {
s := strings.Repeat("Hello ", i+j+1*42) s := strings.Repeat("Hello ", i+j+1*42)
r := strings.NewReader(s) r := strings.NewReader(s)
got, size, err := XXHashFromReader(r) got, size, err := XXHashFromReader(r)
@ -142,16 +142,3 @@ func BenchmarkHashString(b *testing.B) {
}) })
} }
} }
func BenchmarkHashMap(b *testing.B) {
m := map[string]any{}
for i := range 1000 {
m[fmt.Sprintf("key%d", i)] = i
}
b.ResetTimer()
for i := 0; i < b.N; i++ {
HashString(m)
}
}

View file

@ -152,7 +152,10 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
} }
if ectx.Position.LineNumber > 0 { if ectx.Position.LineNumber > 0 {
low := max(ectx.Position.LineNumber-3, 0) low := ectx.Position.LineNumber - 3
if low < 0 {
low = 0
}
if ectx.Position.LineNumber > 2 { if ectx.Position.LineNumber > 2 {
ectx.LinesPos = 2 ectx.LinesPos = 2
@ -160,7 +163,10 @@ func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext
ectx.LinesPos = ectx.Position.LineNumber - 1 ectx.LinesPos = ectx.Position.LineNumber - 1
} }
high := min(ectx.Position.LineNumber+2, len(lines)) high := ectx.Position.LineNumber + 2
if high > len(lines) {
high = len(lines)
}
ectx.Lines = lines[low:high] ectx.Lines = lines[low:high]

View file

@ -133,21 +133,6 @@ func IsNotExist(err error) bool {
return false return false
} }
// IsExist returns true if the error is a file exists error.
// Unlike os.IsExist, this also considers wrapped errors.
func IsExist(err error) bool {
if os.IsExist(err) {
return true
}
// os.IsExist does not consider wrapped errors.
if os.IsExist(errors.Unwrap(err)) {
return true
}
return false
}
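The point of the new IsExist (like IsNotExist above it) is that os.IsExist does not look through errors wrapped with %w, so one explicit Unwrap step is checked as well. A usage sketch; the caller is hypothetical and the helper is assumed to come from Hugo's common/herrors package:

package main

import (
	"fmt"
	"os"

	"github.com/gohugoio/hugo/common/herrors"
)

func main() {
	_ = os.Mkdir("public", 0o755)
	if err := os.Mkdir("public", 0o755); err != nil {
		wrapped := fmt.Errorf("creating output dir: %w", err)
		fmt.Println(os.IsExist(wrapped))      // false: the wrap hides it
		fmt.Println(herrors.IsExist(wrapped)) // true: one Unwrap step is checked
	}
}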
var nilPointerErrRe = regexp.MustCompile(`at <(.*)>: error calling (.*?): runtime error: invalid memory address or nil pointer dereference`) var nilPointerErrRe = regexp.MustCompile(`at <(.*)>: error calling (.*?): runtime error: invalid memory address or nil pointer dereference`)
const deferredPrefix = "__hdeferred/" const deferredPrefix = "__hdeferred/"

View file

@ -20,6 +20,8 @@ import (
"io" "io"
"path/filepath" "path/filepath"
godartsassv1 "github.com/bep/godartsass"
"github.com/bep/godartsass/v2" "github.com/bep/godartsass/v2"
"github.com/bep/golibsass/libsass/libsasserrors" "github.com/bep/golibsass/libsass/libsasserrors"
"github.com/gohugoio/hugo/common/paths" "github.com/gohugoio/hugo/common/paths"
@ -151,6 +153,8 @@ func (e *fileError) causeString() string {
// Avoid repeating the file info in the error message. // Avoid repeating the file info in the error message.
case godartsass.SassError: case godartsass.SassError:
return v.Message return v.Message
case godartsassv1.SassError:
return v.Message
case libsasserrors.Error: case libsasserrors.Error:
return v.Message return v.Message
default: default:
@ -258,27 +262,8 @@ func openFile(filename string, fs afero.Fs) (afero.File, string, error) {
return f, realFilename, nil return f, realFilename, nil
} }
// Cause returns the underlying error, that is, // Cause returns the underlying error or itself if it does not implement Unwrap.
// it unwraps errors until it finds one that does not implement
// the Unwrap method.
// For a shallow variant, see Unwrap.
func Cause(err error) error { func Cause(err error) error {
type unwrapper interface {
Unwrap() error
}
for err != nil {
cause, ok := err.(unwrapper)
if !ok {
break
}
err = cause.Unwrap()
}
return err
}
// Unwrap returns the underlying error or itself if it does not implement Unwrap.
func Unwrap(err error) error {
if u := errors.Unwrap(err); u != nil { if u := errors.Unwrap(err); u != nil {
return u return u
} }
@ -286,7 +271,7 @@ func Unwrap(err error) error {
} }
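The distinction the two doc comments draw is depth: Cause follows the Unwrap chain all the way down, while Unwrap takes exactly one step (or returns the error itself). A small sketch of both, assuming the helpers are called through Hugo's common/herrors package:

package main

import (
	"errors"
	"fmt"

	"github.com/gohugoio/hugo/common/herrors"
)

func main() {
	root := errors.New("root cause")
	wrapped := fmt.Errorf("outer: %w", fmt.Errorf("inner: %w", root))

	fmt.Println(herrors.Unwrap(wrapped)) // inner: root cause  (one step)
	fmt.Println(herrors.Cause(wrapped))  // root cause         (whole chain)
}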
func extractFileTypePos(err error) (string, text.Position) { func extractFileTypePos(err error) (string, text.Position) {
err = Unwrap(err) err = Cause(err)
var fileType string var fileType string
@ -403,7 +388,14 @@ func extractPosition(e error) (pos text.Position) {
case godartsass.SassError: case godartsass.SassError:
span := v.Span span := v.Span
start := span.Start start := span.Start
filename, _ := paths.UrlStringToFilename(span.Url) filename, _ := paths.UrlToFilename(span.Url)
pos.Filename = filename
pos.Offset = start.Offset
pos.ColumnNumber = start.Column
case godartsassv1.SassError:
span := v.Span
start := span.Start
filename, _ := paths.UrlToFilename(span.Url)
pos.Filename = filename pos.Filename = filename
pos.Offset = start.Offset pos.Offset = start.Offset
pos.ColumnNumber = start.Column pos.ColumnNumber = start.Column

View file

@ -26,9 +26,7 @@ import (
"strings" "strings"
"sync" "sync"
"github.com/bep/logg" "github.com/cli/safeexec"
"github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/config" "github.com/gohugoio/hugo/config"
"github.com/gohugoio/hugo/config/security" "github.com/gohugoio/hugo/config/security"
) )
@ -88,7 +86,7 @@ var WithEnviron = func(env []string) func(c *commandeer) {
} }
// New creates a new Exec using the provided security config. // New creates a new Exec using the provided security config.
func New(cfg security.Config, workingDir string, log loggers.Logger) *Exec { func New(cfg security.Config, workingDir string) *Exec {
var baseEnviron []string var baseEnviron []string
for _, v := range os.Environ() { for _, v := range os.Environ() {
k, _ := config.SplitEnvVar(v) k, _ := config.SplitEnvVar(v)
@ -100,9 +98,7 @@ func New(cfg security.Config, workingDir string, log loggers.Logger) *Exec {
return &Exec{ return &Exec{
sc: cfg, sc: cfg,
workingDir: workingDir, workingDir: workingDir,
infol: log.InfoCommand("exec"),
baseEnviron: baseEnviron, baseEnviron: baseEnviron,
newNPXRunnerCache: maps.NewCache[string, func(arg ...any) (Runner, error)](),
} }
} }
@ -112,16 +108,26 @@ func IsNotFound(err error) bool {
return errors.As(err, &notFoundErr) return errors.As(err, &notFoundErr)
} }
// SafeCommand is a wrapper around os/exec Command which uses a LookPath
// implementation that does not search in current directory before looking in PATH.
// See https://github.com/cli/safeexec and the linked issues.
func SafeCommand(name string, arg ...string) (*exec.Cmd, error) {
bin, err := safeexec.LookPath(name)
if err != nil {
return nil, err
}
return exec.Command(bin, arg...), nil
}
// Exec enforces a security policy for commands run via os/exec. // Exec enforces a security policy for commands run via os/exec.
type Exec struct { type Exec struct {
sc security.Config sc security.Config
workingDir string workingDir string
infol logg.LevelLogger
// os.Environ filtered by the Exec.OsEnviron whitelist filter. // os.Environ filtered by the Exec.OsEnviron whitelist filter.
baseEnviron []string baseEnviron []string
newNPXRunnerCache *maps.Cache[string, func(arg ...any) (Runner, error)]
npxInit sync.Once npxInit sync.Once
npxAvailable bool npxAvailable bool
} }
@ -149,86 +155,25 @@ func (e *Exec) new(name string, fullyQualifiedName string, arg ...any) (Runner,
return cm.command(arg...) return cm.command(arg...)
} }
type binaryLocation int
func (b binaryLocation) String() string {
switch b {
case binaryLocationNodeModules:
return "node_modules/.bin"
case binaryLocationNpx:
return "npx"
case binaryLocationPath:
return "PATH"
}
return "unknown"
}
const (
binaryLocationNodeModules binaryLocation = iota + 1
binaryLocationNpx
binaryLocationPath
)
// Npx will in order: // Npx will in order:
// 1. Try to find the binary in the WORKINGDIR/node_modules/.bin directory. // 1. Try to find the binary in the WORKINGDIR/node_modules/.bin directory.
// 2. If not found, and npx is available, run npx --no-install <name> <args>. // 2. If not found, and npx is available, run npx --no-install <name> <args>.
// 3. Fall back to the PATH. // 3. Fall back to the PATH.
// If name is "tailwindcss", we will try the PATH as the second option.
func (e *Exec) Npx(name string, arg ...any) (Runner, error) { func (e *Exec) Npx(name string, arg ...any) (Runner, error) {
if err := e.sc.CheckAllowedExec(name); err != nil { // npx is slow, so first try the common case.
return nil, err
}
newRunner, err := e.newNPXRunnerCache.GetOrCreate(name, func() (func(...any) (Runner, error), error) {
type tryFunc func() func(...any) (Runner, error)
tryFuncs := map[binaryLocation]tryFunc{
binaryLocationNodeModules: func() func(...any) (Runner, error) {
nodeBinFilename := filepath.Join(e.workingDir, nodeModulesBinPath, name) nodeBinFilename := filepath.Join(e.workingDir, nodeModulesBinPath, name)
_, err := exec.LookPath(nodeBinFilename) _, err := safeexec.LookPath(nodeBinFilename)
if err != nil { if err == nil {
return nil return e.new(name, nodeBinFilename, arg...)
} }
return func(arg2 ...any) (Runner, error) {
return e.new(name, nodeBinFilename, arg2...)
}
},
binaryLocationNpx: func() func(...any) (Runner, error) {
e.checkNpx() e.checkNpx()
if !e.npxAvailable { if e.npxAvailable {
return nil r, err := e.npx(name, arg...)
} if err == nil {
return func(arg2 ...any) (Runner, error) { return r, nil
return e.npx(name, arg2...)
}
},
binaryLocationPath: func() func(...any) (Runner, error) {
if _, err := exec.LookPath(name); err != nil {
return nil
}
return func(arg2 ...any) (Runner, error) {
return e.New(name, arg2...)
}
},
}
locations := []binaryLocation{binaryLocationNodeModules, binaryLocationNpx, binaryLocationPath}
if name == "tailwindcss" {
// See https://github.com/gohugoio/hugo/issues/13221#issuecomment-2574801253
locations = []binaryLocation{binaryLocationNodeModules, binaryLocationPath, binaryLocationNpx}
}
for _, loc := range locations {
if f := tryFuncs[loc](); f != nil {
e.infol.Logf("resolve %q using %s", name, loc)
return f, nil
} }
} }
return nil, &NotFoundError{name: name, method: fmt.Sprintf("in %s", locations[len(locations)-1])} return e.New(name, arg...)
})
if err != nil {
return nil, err
}
return newRunner(arg...)
} }
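The ordering documented in the comment block above amounts to a simple priority list: node_modules/.bin first, then npx --no-install, then PATH, with tailwindcss preferring a PATH install over a slow npx invocation. A reduced, illustrative sketch of just that ordering decision (not the actual resolver):

package main

import "fmt"

// resolveOrder returns the lookup order for an npm-installed tool.
func resolveOrder(name string) []string {
	if name == "tailwindcss" {
		// Mirrors the tailwindcss special case above: prefer PATH over npx.
		return []string{"node_modules/.bin", "PATH", "npx"}
	}
	return []string{"node_modules/.bin", "npx", "PATH"}
}

func main() {
	fmt.Println(resolveOrder("postcss"))     // [node_modules/.bin npx PATH]
	fmt.Println(resolveOrder("tailwindcss")) // [node_modules/.bin PATH npx]
}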
const ( const (
@ -333,7 +278,7 @@ func (c *commandeer) command(arg ...any) (*cmdWrapper, error) {
bin = c.fullyQualifiedName bin = c.fullyQualifiedName
} else { } else {
var err error var err error
bin, err = exec.LookPath(c.name) bin, err = safeexec.LookPath(c.name)
if err != nil { if err != nil {
return nil, &NotFoundError{ return nil, &NotFoundError{
name: c.name, name: c.name,
@ -371,7 +316,7 @@ func InPath(binaryName string) bool {
if strings.Contains(binaryName, "/") { if strings.Contains(binaryName, "/") {
panic("binary name should not contain any slash") panic("binary name should not contain any slash")
} }
_, err := exec.LookPath(binaryName) _, err := safeexec.LookPath(binaryName)
return err == nil return err == nil
} }
@ -381,7 +326,7 @@ func LookPath(binaryName string) string {
if strings.Contains(binaryName, "/") { if strings.Contains(binaryName, "/") {
panic("binary name should not contain any slash") panic("binary name should not contain any slash")
} }
s, err := exec.LookPath(binaryName) s, err := safeexec.LookPath(binaryName)
if err != nil { if err != nil {
return "" return ""
} }

View file

@ -74,16 +74,6 @@ func IsTruthful(in any) bool {
} }
} }
// IsMap reports whether v is a map.
func IsMap(v any) bool {
return reflect.ValueOf(v).Kind() == reflect.Map
}
// IsSlice reports whether v is a slice.
func IsSlice(v any) bool {
return reflect.ValueOf(v).Kind() == reflect.Slice
}
var zeroType = reflect.TypeOf((*types.Zeroer)(nil)).Elem() var zeroType = reflect.TypeOf((*types.Zeroer)(nil)).Elem()
// IsTruthfulValue returns whether the given value has a meaningful truth value. // IsTruthfulValue returns whether the given value has a meaningful truth value.
@ -134,7 +124,12 @@ type methodKey struct {
name string name string
} }
var methodCache sync.Map type methods struct {
sync.RWMutex
cache map[methodKey]int
}
var methodCache = &methods{cache: make(map[methodKey]int)}
// GetMethodByName is the same as reflect.Value.MethodByName, but it caches the // GetMethodByName is the same as reflect.Value.MethodByName, but it caches the
// type lookup. // type lookup.
@ -152,16 +147,22 @@ func GetMethodByName(v reflect.Value, name string) reflect.Value {
// -1 if no such method exists. // -1 if no such method exists.
func GetMethodIndexByName(tp reflect.Type, name string) int { func GetMethodIndexByName(tp reflect.Type, name string) int {
k := methodKey{tp, name} k := methodKey{tp, name}
v, found := methodCache.Load(k) methodCache.RLock()
index, found := methodCache.cache[k]
methodCache.RUnlock()
if found { if found {
return v.(int) return index
} }
methodCache.Lock()
defer methodCache.Unlock()
m, ok := tp.MethodByName(name) m, ok := tp.MethodByName(name)
index := m.Index index = m.Index
if !ok { if !ok {
index = -1 index = -1
} }
methodCache.Store(k, index) methodCache.cache[k] = index
if !ok { if !ok {
return -1 return -1
@ -222,27 +223,6 @@ func AsTime(v reflect.Value, loc *time.Location) (time.Time, bool) {
return time.Time{}, false return time.Time{}, false
} }
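The method-index cache above replaces an RWMutex-guarded map with a sync.Map keyed on (reflect.Type, method name), trading stricter typing for lock-free reads on the hot path. A reduced sketch of the same pattern outside Hugo's codebase:

package main

import (
	"fmt"
	"reflect"
	"sync"
)

type methodKey struct {
	tp   reflect.Type
	name string
}

var cache sync.Map // methodKey -> int

// methodIndex caches reflect MethodByName lookups; -1 means "no such method"
// and is cached too, so repeated misses stay cheap.
func methodIndex(tp reflect.Type, name string) int {
	k := methodKey{tp, name}
	if v, ok := cache.Load(k); ok {
		return v.(int)
	}
	idx := -1
	if m, ok := tp.MethodByName(name); ok {
		idx = m.Index
	}
	cache.Store(k, idx)
	return idx
}

type greeter struct{}

func (greeter) Hello() {}

func main() {
	t := reflect.TypeOf(greeter{})
	fmt.Println(methodIndex(t, "Hello"))   // 0
	fmt.Println(methodIndex(t, "Missing")) // -1
}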
// ToSliceAny converts the given value to a slice of any if possible.
func ToSliceAny(v any) ([]any, bool) {
if v == nil {
return nil, false
}
switch vv := v.(type) {
case []any:
return vv, true
default:
vvv := reflect.ValueOf(v)
if vvv.Kind() == reflect.Slice {
out := make([]any, vvv.Len())
for i := range vvv.Len() {
out[i] = vvv.Index(i).Interface()
}
return out, true
}
}
return nil, false
}
func CallMethodByName(cxt context.Context, name string, v reflect.Value) []reflect.Value { func CallMethodByName(cxt context.Context, name string, v reflect.Value) []reflect.Value {
fn := v.MethodByName(name) fn := v.MethodByName(name)
var args []reflect.Value var args []reflect.Value

View file

@ -50,19 +50,6 @@ func TestIsContextType(t *testing.T) {
c.Assert(IsContextType(reflect.TypeOf(valueCtx)), qt.IsTrue) c.Assert(IsContextType(reflect.TypeOf(valueCtx)), qt.IsTrue)
} }
func TestToSliceAny(t *testing.T) {
c := qt.New(t)
checkOK := func(in any, expected []any) {
out, ok := ToSliceAny(in)
c.Assert(ok, qt.Equals, true)
c.Assert(out, qt.DeepEquals, expected)
}
checkOK([]any{1, 2, 3}, []any{1, 2, 3})
checkOK([]int{1, 2, 3}, []any{1, 2, 3})
}
func BenchmarkIsContextType(b *testing.B) { func BenchmarkIsContextType(b *testing.B) {
type k string type k string
b.Run("value", func(b *testing.B) { b.Run("value", func(b *testing.B) {
@ -134,17 +121,3 @@ func BenchmarkGetMethodByName(b *testing.B) {
} }
} }
} }
func BenchmarkGetMethodByNamePara(b *testing.B) {
v := reflect.ValueOf(&testStruct{})
methods := []string{"Method1", "Method2", "Method3", "Method4", "Method5"}
b.ResetTimer()
b.RunParallel(func(pb *testing.PB) {
for pb.Next() {
for _, method := range methods {
_ = GetMethodByName(v, method)
}
}
})
}

View file

@ -16,7 +16,6 @@ package hstrings
import ( import (
"fmt" "fmt"
"regexp" "regexp"
"slices"
"strings" "strings"
"sync" "sync"
@ -51,7 +50,12 @@ func (s StringEqualFold) Eq(s2 any) bool {
// EqualAny returns whether a string is equal to any of the given strings. // EqualAny returns whether a string is equal to any of the given strings.
func EqualAny(a string, b ...string) bool { func EqualAny(a string, b ...string) bool {
return slices.Contains(b, a) for _, s := range b {
if a == s {
return true
}
}
return false
} }
// regexpCache represents a cache of regexp objects protected by a mutex. // regexpCache represents a cache of regexp objects protected by a mutex.
@ -99,7 +103,12 @@ func GetOrCompileRegexp(pattern string) (re *regexp.Regexp, err error) {
// InSlice checks if a string is an element of a slice of strings // InSlice checks if a string is an element of a slice of strings
// and returns a boolean value. // and returns a boolean value.
func InSlice(arr []string, el string) bool { func InSlice(arr []string, el string) bool {
return slices.Contains(arr, el) for _, v := range arr {
if v == el {
return true
}
}
return false
} }
// InSlicEqualFold checks if a string is an element of a slice of strings // InSlicEqualFold checks if a string is an element of a slice of strings
@ -128,7 +137,7 @@ func ToString(v any) (string, bool) {
return "", false return "", false
} }
type ( type Tuple struct {
Strings2 [2]string First string
Strings3 [3]string Second string
) }

View file

@ -46,18 +46,18 @@ func TestHasBytesWriter(t *testing.T) {
return strings.Repeat("ab cfo", r.Intn(33)) return strings.Repeat("ab cfo", r.Intn(33))
} }
for range 22 { for i := 0; i < 22; i++ {
h, w := neww() h, w := neww()
fmt.Fprint(w, rndStr()+"abc __foobar"+rndStr()) fmt.Fprintf(w, rndStr()+"abc __foobar"+rndStr())
c.Assert(h.Patterns[0].Match, qt.Equals, true) c.Assert(h.Patterns[0].Match, qt.Equals, true)
h, w = neww() h, w = neww()
fmt.Fprint(w, rndStr()+"abc __f") fmt.Fprintf(w, rndStr()+"abc __f")
fmt.Fprint(w, "oo bar"+rndStr()) fmt.Fprintf(w, "oo bar"+rndStr())
c.Assert(h.Patterns[0].Match, qt.Equals, true) c.Assert(h.Patterns[0].Match, qt.Equals, true)
h, w = neww() h, w = neww()
fmt.Fprint(w, rndStr()+"abc __moo bar") fmt.Fprintf(w, rndStr()+"abc __moo bar")
c.Assert(h.Patterns[0].Match, qt.Equals, false) c.Assert(h.Patterns[0].Match, qt.Equals, false)
} }

View file

@ -74,13 +74,13 @@ type StringReader interface {
ReadString() string ReadString() string
} }
// NewReadSeekerNoOpCloserFromBytes uses bytes.NewReader to create a new ReadSeekerNoOpCloser // NewReadSeekerNoOpCloserFromString uses strings.NewReader to create a new ReadSeekerNoOpCloser
// from the given bytes slice. // from the given bytes slice.
func NewReadSeekerNoOpCloserFromBytes(content []byte) readSeekerNopCloser { func NewReadSeekerNoOpCloserFromBytes(content []byte) readSeekerNopCloser {
return readSeekerNopCloser{bytes.NewReader(content)} return readSeekerNopCloser{bytes.NewReader(content)}
} }
// NewOpenReadSeekCloser creates a new ReadSeekCloser from the given ReadSeeker. // NewReadSeekCloser creates a new ReadSeekCloser from the given ReadSeeker.
// The ReadSeeker will be seeked to the beginning before returned. // The ReadSeeker will be seeked to the beginning before returned.
func NewOpenReadSeekCloser(r ReadSeekCloser) OpenReadSeekCloser { func NewOpenReadSeekCloser(r ReadSeekCloser) OpenReadSeekCloser {
return func() (ReadSeekCloser, error) { return func() (ReadSeekCloser, error) {

View file

@ -25,13 +25,14 @@ import (
"sync" "sync"
"time" "time"
godartsassv1 "github.com/bep/godartsass"
"github.com/bep/logg" "github.com/bep/logg"
"github.com/mitchellh/mapstructure"
"github.com/bep/godartsass/v2" "github.com/bep/godartsass/v2"
"github.com/gohugoio/hugo/common/hcontext" "github.com/gohugoio/hugo/common/hcontext"
"github.com/gohugoio/hugo/common/hexec" "github.com/gohugoio/hugo/common/hexec"
"github.com/gohugoio/hugo/common/loggers" "github.com/gohugoio/hugo/common/loggers"
"github.com/gohugoio/hugo/common/maps"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/spf13/afero" "github.com/spf13/afero"
@ -54,8 +55,6 @@ var (
vendorInfo string vendorInfo string
) )
var _ maps.StoreProvider = (*HugoInfo)(nil)
// HugoInfo contains information about the current Hugo environment // HugoInfo contains information about the current Hugo environment
type HugoInfo struct { type HugoInfo struct {
CommitHash string CommitHash string
@ -73,8 +72,6 @@ type HugoInfo struct {
conf ConfigProvider conf ConfigProvider
deps []*Dependency deps []*Dependency
store *maps.Scratch
// Context gives access to some of the context scoped variables. // Context gives access to some of the context scoped variables.
Context Context Context Context
} }
@ -119,10 +116,6 @@ func (i HugoInfo) Deps() []*Dependency {
return i.deps return i.deps
} }
func (i HugoInfo) Store() *maps.Scratch {
return i.store
}
// Deprecated: Use hugo.IsMultihost instead. // Deprecated: Use hugo.IsMultihost instead.
func (i HugoInfo) IsMultiHost() bool { func (i HugoInfo) IsMultiHost() bool {
Deprecate("hugo.IsMultiHost", "Use hugo.IsMultihost instead.", "v0.124.0") Deprecate("hugo.IsMultiHost", "Use hugo.IsMultihost instead.", "v0.124.0")
@ -139,13 +132,9 @@ func (i HugoInfo) IsMultilingual() bool {
return i.conf.IsMultilingual() return i.conf.IsMultilingual()
} }
type contextKey uint8 type contextKey string
const ( var markupScope = hcontext.NewContextDispatcher[string](contextKey("markupScope"))
contextKeyMarkupScope contextKey = iota
)
var markupScope = hcontext.NewContextDispatcher[string](contextKeyMarkupScope)
type Context struct{} type Context struct{}
@ -196,7 +185,6 @@ func NewInfo(conf ConfigProvider, deps []*Dependency) HugoInfo {
Environment: conf.Environment(), Environment: conf.Environment(),
conf: conf, conf: conf,
deps: deps, deps: deps,
store: maps.NewScratch(),
GoVersion: goVersion, GoVersion: goVersion,
} }
} }
@ -320,7 +308,7 @@ func GetDependencyListNonGo() []string {
if dartSass := dartSassVersion(); dartSass.ProtocolVersion != "" { if dartSass := dartSassVersion(); dartSass.ProtocolVersion != "" {
dartSassPath := "github.com/sass/dart-sass-embedded" dartSassPath := "github.com/sass/dart-sass-embedded"
if IsDartSassGeV2() { if IsDartSassV2() {
dartSassPath = "github.com/sass/dart-sass" dartSassPath = "github.com/sass/dart-sass"
} }
deps = append(deps, deps = append(deps,
@ -367,15 +355,22 @@ type Dependency struct {
} }
func dartSassVersion() godartsass.DartSassVersion { func dartSassVersion() godartsass.DartSassVersion {
if DartSassBinaryName == "" || !IsDartSassGeV2() { if DartSassBinaryName == "" {
return godartsass.DartSassVersion{} return godartsass.DartSassVersion{}
} }
if IsDartSassV2() {
v, _ := godartsass.Version(DartSassBinaryName) v, _ := godartsass.Version(DartSassBinaryName)
return v return v
}
v, _ := godartsassv1.Version(DartSassBinaryName)
var vv godartsass.DartSassVersion
mapstructure.WeakDecode(v, &vv)
return vv
} }
// DartSassBinaryName is the name of the Dart Sass binary to use. // DartSassBinaryName is the name of the Dart Sass binary to use.
// TODO(bep) find a better place for this. // TODO(beop) find a better place for this.
var DartSassBinaryName string var DartSassBinaryName string
func init() { func init() {
@ -400,10 +395,7 @@ var (
dartSassBinaryNamesV2 = []string{"dart-sass", "sass"} dartSassBinaryNamesV2 = []string{"dart-sass", "sass"}
) )
// TODO(bep) we eventually want to remove this, but keep it for a while to throw an informative error. func IsDartSassV2() bool {
// We stopped supporting the old binary in Hugo 0.139.0.
func IsDartSassGeV2() bool {
// dart-sass-embedded was the first version of the embedded Dart Sass before it was moved into the main project.
return !strings.Contains(DartSassBinaryName, "embedded") return !strings.Contains(DartSassBinaryName, "embedded")
} }
@ -415,39 +407,22 @@ func IsDartSassGeV2() bool {
// 2. Their theme to work for at least the last few Hugo versions. // 2. Their theme to work for at least the last few Hugo versions.
func Deprecate(item, alternative string, version string) { func Deprecate(item, alternative string, version string) {
level := deprecationLogLevelFromVersion(version) level := deprecationLogLevelFromVersion(version)
deprecateLevel(item, alternative, version, level) DeprecateLevel(item, alternative, version, level)
}
// See Deprecate for details.
func DeprecateWithLogger(item, alternative string, version string, log logg.Logger) {
level := deprecationLogLevelFromVersion(version)
deprecateLevelWithLogger(item, alternative, version, level, log)
}
// DeprecateLevelMin informs about a deprecation starting at the given version, but with a minimum log level.
func DeprecateLevelMin(item, alternative string, version string, minLevel logg.Level) {
level := max(deprecationLogLevelFromVersion(version), minLevel)
deprecateLevel(item, alternative, version, level)
}
// deprecateLevel informs about a deprecation logging at the given level.
func deprecateLevel(item, alternative, version string, level logg.Level) {
deprecateLevelWithLogger(item, alternative, version, level, loggers.Log().Logger())
} }
// DeprecateLevel informs about a deprecation logging at the given level. // DeprecateLevel informs about a deprecation logging at the given level.
func deprecateLevelWithLogger(item, alternative, version string, level logg.Level, log logg.Logger) { func DeprecateLevel(item, alternative, version string, level logg.Level) {
var msg string var msg string
if level == logg.LevelError { if level == logg.LevelError {
msg = fmt.Sprintf("%s was deprecated in Hugo %s and subsequently removed. %s", item, version, alternative) msg = fmt.Sprintf("%s was deprecated in Hugo %s and will be removed in Hugo %s. %s", item, version, CurrentVersion.Next().ReleaseVersion(), alternative)
} else { } else {
msg = fmt.Sprintf("%s was deprecated in Hugo %s and will be removed in a future release. %s", item, version, alternative) msg = fmt.Sprintf("%s was deprecated in Hugo %s and will be removed in a future release. %s", item, version, alternative)
} }
log.WithLevel(level).WithField(loggers.FieldNameCmd, "deprecated").Logf("%s", msg) loggers.Log().Logger().WithLevel(level).WithField(loggers.FieldNameCmd, "deprecated").Logf(msg)
} }
// We usually do about one minor version a month. // We ususally do about one minor version a month.
// We want people to run at least the current and previous version without any warnings. // We want people to run at least the current and previous version without any warnings.
// We want people who don't update Hugo that often to see the warnings and errors before we remove the feature. // We want people who don't update Hugo that often to see the warnings and errors before we remove the feature.
func deprecationLogLevelFromVersion(ver string) logg.Level { func deprecationLogLevelFromVersion(ver string) logg.Level {
@ -455,11 +430,11 @@ func deprecationLogLevelFromVersion(ver string) logg.Level {
to := CurrentVersion to := CurrentVersion
minorDiff := to.Minor - from.Minor minorDiff := to.Minor - from.Minor
switch { switch {
case minorDiff >= 15: case minorDiff >= 12:
// Start failing the build after about 15 months. // Start failing the build after about a year.
return logg.LevelError return logg.LevelError
case minorDiff >= 3: case minorDiff >= 6:
// Start printing warnings after about 3 months. // Start printing warnings after about six months.
return logg.LevelWarn return logg.LevelWarn
default: default:
return logg.LevelInfo return logg.LevelInfo
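A worked check of those thresholds, using the 0.148.0-DEV version bumped earlier in this diff as the current version (the version strings are illustrative):

package main

import "fmt"

// Mirrors the switch above: >=15 minors behind errors, >=3 warns, else info.
func levelFor(minorDiff int) string {
	switch {
	case minorDiff >= 15:
		return "ERROR"
	case minorDiff >= 3:
		return "WARN"
	default:
		return "INFO"
	}
}

func main() {
	fmt.Println(levelFor(148 - 146)) // INFO: deprecated two minors ago
	fmt.Println(levelFor(148 - 145)) // WARN: deprecated three minors ago
	fmt.Println(levelFor(148 - 133)) // ERROR: deprecated fifteen minors ago
}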

View file

@ -57,16 +57,12 @@ func TestDeprecationLogLevelFromVersion(t *testing.T) {
c.Assert(deprecationLogLevelFromVersion("0.55.0"), qt.Equals, logg.LevelError) c.Assert(deprecationLogLevelFromVersion("0.55.0"), qt.Equals, logg.LevelError)
ver := CurrentVersion ver := CurrentVersion
c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelInfo) c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelInfo)
ver.Minor -= 3 ver.Minor -= 1
c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelInfo)
ver.Minor -= 6
c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelWarn) c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelWarn)
ver.Minor -= 4 ver.Minor -= 6
c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelWarn)
ver.Minor -= 13
c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelError) c.Assert(deprecationLogLevelFromVersion(ver.String()), qt.Equals, logg.LevelError)
// Added just to find the threshold for where we can remove deprecated items.
// Subtract 5 from the minor version of the first ERRORed version => 0.122.0.
c.Assert(deprecationLogLevelFromVersion("0.127.0"), qt.Equals, logg.LevelError)
} }
func TestMarkupScope(t *testing.T) { func TestMarkupScope(t *testing.T) {

View file

@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build extended //go:build extended
// +build extended
package hugo package hugo

View file

@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build !extended //go:build !extended
// +build !extended
package hugo package hugo

View file

@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build withdeploy //go:build withdeploy
// +build withdeploy
package hugo package hugo

View file

@ -12,6 +12,7 @@
// limitations under the License. // limitations under the License.
//go:build !withdeploy //go:build !withdeploy
// +build !withdeploy
package hugo package hugo

View file

@ -17,7 +17,7 @@ package hugo
// This should be the only one. // This should be the only one.
var CurrentVersion = Version{ var CurrentVersion = Version{
Major: 0, Major: 0,
Minor: 148, Minor: 137,
PatchLevel: 0, PatchLevel: 1,
Suffix: "-DEV", Suffix: "",
} }

View file

@ -18,19 +18,18 @@ package loggers
import ( import (
"fmt" "fmt"
"io" "io"
"regexp"
"strings" "strings"
"sync" "sync"
"github.com/bep/logg" "github.com/bep/logg"
) )
// newNoAnsiEscapeHandler creates a new noAnsiEscapeHandler // newNoColoursHandler creates a new NoColoursHandler
func newNoAnsiEscapeHandler(outWriter, errWriter io.Writer, noLevelPrefix bool, predicate func(*logg.Entry) bool) *noAnsiEscapeHandler { func newNoColoursHandler(outWriter, errWriter io.Writer, noLevelPrefix bool, predicate func(*logg.Entry) bool) *noColoursHandler {
if predicate == nil { if predicate == nil {
predicate = func(e *logg.Entry) bool { return true } predicate = func(e *logg.Entry) bool { return true }
} }
return &noAnsiEscapeHandler{ return &noColoursHandler{
noLevelPrefix: noLevelPrefix, noLevelPrefix: noLevelPrefix,
outWriter: outWriter, outWriter: outWriter,
errWriter: errWriter, errWriter: errWriter,
@ -38,15 +37,15 @@ func newNoAnsiEscapeHandler(outWriter, errWriter io.Writer, noLevelPrefix bool,
} }
} }
type noAnsiEscapeHandler struct { type noColoursHandler struct {
mu sync.Mutex mu sync.Mutex
outWriter io.Writer outWriter io.Writer // Defaults to os.Stdout.
errWriter io.Writer errWriter io.Writer // Defaults to os.Stderr.
predicate func(*logg.Entry) bool predicate func(*logg.Entry) bool
noLevelPrefix bool noLevelPrefix bool
} }
func (h *noAnsiEscapeHandler) HandleLog(e *logg.Entry) error { func (h *noColoursHandler) HandleLog(e *logg.Entry) error {
if !h.predicate(e) { if !h.predicate(e) {
return nil return nil
} }
@ -72,12 +71,10 @@ func (h *noAnsiEscapeHandler) HandleLog(e *logg.Entry) error {
prefix = prefix + ": " prefix = prefix + ": "
} }
msg := stripANSI(e.Message)
if h.noLevelPrefix { if h.noLevelPrefix {
fmt.Fprintf(w, "%s%s", prefix, msg) fmt.Fprintf(w, "%s%s", prefix, e.Message)
} else { } else {
fmt.Fprintf(w, "%s %s%s", levelString[e.Level], prefix, msg) fmt.Fprintf(w, "%s %s%s", levelString[e.Level], prefix, e.Message)
} }
for _, field := range e.Fields { for _, field := range e.Fields {
@ -91,10 +88,3 @@ func (h *noAnsiEscapeHandler) HandleLog(e *logg.Entry) error {
return nil return nil
} }
var ansiRe = regexp.MustCompile(`\x1b\[[0-9;]*m`)
// stripANSI removes ANSI escape codes from s.
func stripANSI(s string) string {
return ansiRe.ReplaceAllString(s, "")
}

View file

@ -1,40 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
// Some functions in this file (see comments) is based on the Go source code,
// copyright The Go Authors and governed by a BSD-style license.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package loggers
import (
"bytes"
"testing"
"github.com/bep/logg"
qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/terminal"
)
func TestNoAnsiEscapeHandler(t *testing.T) {
c := qt.New(t)
test := func(s string) {
c.Assert(stripANSI(terminal.Notice(s)), qt.Equals, s)
}
test(`error in "file.md:1:2"`)
var buf bytes.Buffer
h := newNoAnsiEscapeHandler(&buf, &buf, false, nil)
h.HandleLog(&logg.Entry{Message: terminal.Notice(`error in "file.md:1:2"`), Level: logg.LevelInfo})
c.Assert(buf.String(), qt.Equals, "INFO error in \"file.md:1:2\"\n")
}

View file

@ -38,8 +38,8 @@ var (
// Options defines options for the logger. // Options defines options for the logger.
type Options struct { type Options struct {
Level logg.Level Level logg.Level
StdOut io.Writer Stdout io.Writer
StdErr io.Writer Stderr io.Writer
DistinctLevel logg.Level DistinctLevel logg.Level
StoreErrors bool StoreErrors bool
HandlerPost func(e *logg.Entry) error HandlerPost func(e *logg.Entry) error
@ -48,22 +48,21 @@ type Options struct {
// New creates a new logger with the given options. // New creates a new logger with the given options.
func New(opts Options) Logger { func New(opts Options) Logger {
if opts.StdOut == nil { if opts.Stdout == nil {
opts.StdOut = os.Stdout opts.Stdout = os.Stdout
} }
if opts.StdErr == nil { if opts.Stderr == nil {
opts.StdErr = os.Stderr opts.Stderr = os.Stdout
} }
if opts.Level == 0 { if opts.Level == 0 {
opts.Level = logg.LevelWarn opts.Level = logg.LevelWarn
} }
var logHandler logg.Handler var logHandler logg.Handler
if terminal.PrintANSIColors(os.Stderr) { if terminal.PrintANSIColors(os.Stdout) {
logHandler = newDefaultHandler(opts.StdErr, opts.StdErr) logHandler = newDefaultHandler(opts.Stdout, opts.Stderr)
} else { } else {
logHandler = newNoAnsiEscapeHandler(opts.StdErr, opts.StdErr, false, nil) logHandler = newNoColoursHandler(opts.Stdout, opts.Stderr, false, nil)
} }
errorsw := &strings.Builder{} errorsw := &strings.Builder{}
@ -96,7 +95,7 @@ func New(opts Options) Logger {
} }
if opts.StoreErrors { if opts.StoreErrors {
h := newNoAnsiEscapeHandler(io.Discard, errorsw, true, func(e *logg.Entry) bool { h := newNoColoursHandler(io.Discard, errorsw, true, func(e *logg.Entry) bool {
return e.Level >= logg.LevelError return e.Level >= logg.LevelError
}) })
@ -138,8 +137,7 @@ func New(opts Options) Logger {
logCounters: logCounters, logCounters: logCounters,
errors: errorsw, errors: errorsw,
reset: reset, reset: reset,
stdOut: opts.StdOut, out: opts.Stdout,
stdErr: opts.StdErr,
level: opts.Level, level: opts.Level,
logger: logger, logger: logger,
tracel: l.WithLevel(logg.LevelTrace), tracel: l.WithLevel(logg.LevelTrace),
@ -155,6 +153,8 @@ func NewDefault() Logger {
opts := Options{ opts := Options{
DistinctLevel: logg.LevelWarn, DistinctLevel: logg.LevelWarn,
Level: logg.LevelWarn, Level: logg.LevelWarn,
Stdout: os.Stdout,
Stderr: os.Stdout,
} }
return New(opts) return New(opts)
} }
@ -163,6 +163,8 @@ func NewTrace() Logger {
opts := Options{ opts := Options{
DistinctLevel: logg.LevelWarn, DistinctLevel: logg.LevelWarn,
Level: logg.LevelTrace, Level: logg.LevelTrace,
Stdout: os.Stdout,
Stderr: os.Stdout,
} }
return New(opts) return New(opts)
} }
@ -187,8 +189,7 @@ type Logger interface {
Level() logg.Level Level() logg.Level
LoggCount(logg.Level) int LoggCount(logg.Level) int
Logger() logg.Logger Logger() logg.Logger
StdOut() io.Writer Out() io.Writer
StdErr() io.Writer
Printf(format string, v ...any) Printf(format string, v ...any)
Println(v ...any) Println(v ...any)
PrintTimerIfDelayed(start time.Time, name string) PrintTimerIfDelayed(start time.Time, name string)
@ -206,8 +207,7 @@ type logAdapter struct {
logCounters *logLevelCounter logCounters *logLevelCounter
errors *strings.Builder errors *strings.Builder
reset func() reset func()
stdOut io.Writer out io.Writer
stdErr io.Writer
level logg.Level level logg.Level
logger logg.Logger logger logg.Logger
tracel logg.LevelLogger tracel logg.LevelLogger
@ -259,12 +259,8 @@ func (l *logAdapter) Logger() logg.Logger {
return l.logger return l.logger
} }
func (l *logAdapter) StdOut() io.Writer { func (l *logAdapter) Out() io.Writer {
return l.stdOut return l.out
}
func (l *logAdapter) StdErr() io.Writer {
return l.stdErr
} }
// PrintTimerIfDelayed prints a time statement to the FEEDBACK logger // PrintTimerIfDelayed prints a time statement to the FEEDBACK logger
@ -275,7 +271,7 @@ func (l *logAdapter) PrintTimerIfDelayed(start time.Time, name string) {
if milli < 500 { if milli < 500 {
return return
} }
fmt.Fprintf(l.stdErr, "%s in %v ms", name, milli) l.Printf("%s in %v ms", name, milli)
} }
func (l *logAdapter) Printf(format string, v ...any) { func (l *logAdapter) Printf(format string, v ...any) {
@ -283,11 +279,11 @@ func (l *logAdapter) Printf(format string, v ...any) {
if !strings.HasSuffix(format, "\n") { if !strings.HasSuffix(format, "\n") {
format += "\n" format += "\n"
} }
fmt.Fprintf(l.stdOut, format, v...) fmt.Fprintf(l.out, format, v...)
} }
func (l *logAdapter) Println(v ...any) { func (l *logAdapter) Println(v ...any) {
fmt.Fprintln(l.stdOut, v...) fmt.Fprintln(l.out, v...)
} }
func (l *logAdapter) Reset() { func (l *logAdapter) Reset() {

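To show how the renamed writer fields are meant to be used on the master side, here is a minimal sketch that sends log records to stderr and plain Printf/Println output to stdout. The import path github.com/gohugoio/hugo/common/loggers is assumed; on the v0.137.1 side the fields are named Stdout/Stderr instead.

package main

import (
	"os"

	"github.com/bep/logg"
	"github.com/gohugoio/hugo/common/loggers"
)

func main() {
	l := loggers.New(loggers.Options{
		Level:         logg.LevelInfo,
		StdOut:        os.Stdout, // Printf/Println output
		StdErr:        os.Stderr, // log records (INFO/WARN/ERROR)
		DistinctLevel: logg.LevelWarn,
		StoreErrors:   true,
	})

	l.Println("plain output, written to StdOut")
	l.Warnln("a warning, written to StdErr")
	l.Errorln("an error, also recorded separately because StoreErrors is set")
}
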
View file

@ -31,13 +31,13 @@ func TestLogDistinct(t *testing.T) {
opts := loggers.Options{ opts := loggers.Options{
DistinctLevel: logg.LevelWarn, DistinctLevel: logg.LevelWarn,
StoreErrors: true, StoreErrors: true,
StdOut: io.Discard, Stdout: io.Discard,
StdErr: io.Discard, Stderr: io.Discard,
} }
l := loggers.New(opts) l := loggers.New(opts)
for range 10 { for i := 0; i < 10; i++ {
l.Errorln("error 1") l.Errorln("error 1")
l.Errorln("error 2") l.Errorln("error 2")
l.Warnln("warn 1") l.Warnln("warn 1")
@ -54,8 +54,8 @@ func TestHookLast(t *testing.T) {
HandlerPost: func(e *logg.Entry) error { HandlerPost: func(e *logg.Entry) error {
panic(e.Message) panic(e.Message)
}, },
StdOut: io.Discard, Stdout: io.Discard,
StdErr: io.Discard, Stderr: io.Discard,
} }
l := loggers.New(opts) l := loggers.New(opts)
@ -70,8 +70,8 @@ func TestOptionStoreErrors(t *testing.T) {
opts := loggers.Options{ opts := loggers.Options{
StoreErrors: true, StoreErrors: true,
StdErr: &sb, Stderr: &sb,
StdOut: &sb, Stdout: &sb,
} }
l := loggers.New(opts) l := loggers.New(opts)
@ -131,13 +131,13 @@ func TestReset(t *testing.T) {
opts := loggers.Options{ opts := loggers.Options{
StoreErrors: true, StoreErrors: true,
DistinctLevel: logg.LevelWarn, DistinctLevel: logg.LevelWarn,
StdOut: io.Discard, Stdout: io.Discard,
StdErr: io.Discard, Stderr: io.Discard,
} }
l := loggers.New(opts) l := loggers.New(opts)
for range 3 { for i := 0; i < 3; i++ {
l.Errorln("error 1") l.Errorln("error 1")
l.Errorln("error 2") l.Errorln("error 2")
l.Errorln("error 1") l.Errorln("error 1")

View file

@ -21,15 +21,7 @@ import (
"github.com/bep/logg" "github.com/bep/logg"
) )
// SetGlobalLogger sets the global logger. func InitGlobalLogger(level logg.Level, panicOnWarnings bool) {
// This is used in a few places in Hugo, e.g. deprecated functions.
func SetGlobalLogger(logger Logger) {
logMu.Lock()
defer logMu.Unlock()
log = logger
}
func initGlobalLogger(level logg.Level, panicOnWarnings bool) {
logMu.Lock() logMu.Lock()
defer logMu.Unlock() defer logMu.Unlock()
var logHookLast func(e *logg.Entry) error var logHookLast func(e *logg.Entry) error
@ -58,5 +50,5 @@ func Log() Logger {
var log Logger var log Logger
func init() { func init() {
initGlobalLogger(logg.LevelWarn, false) InitGlobalLogger(logg.LevelWarn, false)
} }

View file

@ -13,14 +13,11 @@
package maps package maps
import ( import "sync"
"sync"
)
// Cache is a simple thread safe cache backed by a map. // Cache is a simple thread safe cache backed by a map.
type Cache[K comparable, T any] struct { type Cache[K comparable, T any] struct {
m map[K]T m map[K]T
hasBeenInitialized bool
sync.RWMutex sync.RWMutex
} }
@ -37,13 +34,8 @@ func (c *Cache[K, T]) Get(key K) (T, bool) {
return zero, false return zero, false
} }
c.RLock() c.RLock()
v, found := c.get(key)
c.RUnlock()
return v, found
}
func (c *Cache[K, T]) get(key K) (T, bool) {
v, found := c.m[key] v, found := c.m[key]
c.RUnlock()
return v, found return v, found
} }
@ -69,77 +61,19 @@ func (c *Cache[K, T]) GetOrCreate(key K, create func() (T, error)) (T, error) {
return v, nil return v, nil
} }
// Contains returns whether the given key exists in the cache.
func (c *Cache[K, T]) Contains(key K) bool {
c.RLock()
_, found := c.m[key]
c.RUnlock()
return found
}
// InitAndGet initializes the cache if not already done and returns the value for the given key.
// The init state will be reset on Reset or Drain.
func (c *Cache[K, T]) InitAndGet(key K, init func(get func(key K) (T, bool), set func(key K, value T)) error) (T, error) {
var v T
c.RLock()
if !c.hasBeenInitialized {
c.RUnlock()
if err := func() error {
c.Lock()
defer c.Unlock()
// Double check in case another goroutine has initialized it in the meantime.
if !c.hasBeenInitialized {
err := init(c.get, c.set)
if err != nil {
return err
}
c.hasBeenInitialized = true
}
return nil
}(); err != nil {
return v, err
}
// Reacquire the read lock.
c.RLock()
}
v = c.m[key]
c.RUnlock()
return v, nil
}
// Set sets the given key to the given value. // Set sets the given key to the given value.
func (c *Cache[K, T]) Set(key K, value T) { func (c *Cache[K, T]) Set(key K, value T) {
c.Lock() c.Lock()
c.set(key, value) c.m[key] = value
c.Unlock() c.Unlock()
} }
// SetIfAbsent sets the given key to the given value if the key does not already exist in the cache.
func (c *Cache[K, T]) SetIfAbsent(key K, value T) {
c.RLock()
if _, found := c.get(key); !found {
c.RUnlock()
c.Set(key, value)
} else {
c.RUnlock()
}
}
func (c *Cache[K, T]) set(key K, value T) {
c.m[key] = value
}
// ForEeach calls the given function for each key/value pair in the cache. // ForEeach calls the given function for each key/value pair in the cache.
// If the function returns false, the iteration stops. func (c *Cache[K, T]) ForEeach(f func(K, T)) {
func (c *Cache[K, T]) ForEeach(f func(K, T) bool) {
c.RLock() c.RLock()
defer c.RUnlock() defer c.RUnlock()
for k, v := range c.m { for k, v := range c.m {
if !f(k, v) { f(k, v)
return
}
} }
} }
@ -147,7 +81,6 @@ func (c *Cache[K, T]) Drain() map[K]T {
c.Lock() c.Lock()
m := c.m m := c.m
c.m = make(map[K]T) c.m = make(map[K]T)
c.hasBeenInitialized = false
c.Unlock() c.Unlock()
return m return m
} }
@ -160,8 +93,7 @@ func (c *Cache[K, T]) Len() int {
func (c *Cache[K, T]) Reset() { func (c *Cache[K, T]) Reset() {
c.Lock() c.Lock()
clear(c.m) c.m = make(map[K]T)
c.hasBeenInitialized = false
c.Unlock() c.Unlock()
} }
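
A short usage sketch for the master-side cache additions (GetOrCreate, SetIfAbsent, InitAndGet and the early-exit ForEeach). The NewCache constructor is assumed here; it is not part of the hunk above.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	// NewCache is assumed; only the methods below appear in the diff.
	c := maps.NewCache[string, int]()

	// GetOrCreate computes and stores the value on first use.
	v, _ := c.GetOrCreate("answer", func() (int, error) { return 42, nil })
	fmt.Println(v) // 42

	// SetIfAbsent writes only when the key is missing, so 42 survives.
	c.SetIfAbsent("answer", 0)

	// InitAndGet runs the init function once (until Reset or Drain) and
	// then returns the value stored for the requested key.
	v, _ = c.InitAndGet("pi", func(get func(string) (int, bool), set func(string, int)) error {
		if _, found := get("pi"); !found {
			set("pi", 3)
		}
		return nil
	})
	fmt.Println(v) // 3

	// ForEeach (sic) stops iterating when the callback returns false.
	c.ForEeach(func(k string, val int) bool {
		fmt.Println(k, val)
		return true
	})
}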

View file

@ -73,14 +73,10 @@ func TestPrepareParams(t *testing.T) {
for i, test := range tests { for i, test := range tests {
t.Run(fmt.Sprint(i), func(t *testing.T) { t.Run(fmt.Sprint(i), func(t *testing.T) {
// PrepareParams modifies input. // PrepareParams modifies input.
prepareClone := PrepareParamsClone(test.input)
PrepareParams(test.input) PrepareParams(test.input)
if !reflect.DeepEqual(test.expected, test.input) { if !reflect.DeepEqual(test.expected, test.input) {
t.Errorf("[%d] Expected\n%#v, got\n%#v\n", i, test.expected, test.input) t.Errorf("[%d] Expected\n%#v, got\n%#v\n", i, test.expected, test.input)
} }
if !reflect.DeepEqual(test.expected, prepareClone) {
t.Errorf("[%d] Expected\n%#v, got\n%#v\n", i, test.expected, prepareClone)
}
}) })
} }
} }

View file

@ -1,144 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package maps
import (
"slices"
"github.com/gohugoio/hugo/common/hashing"
)
// Ordered is a map that can be iterated in the order of insertion.
// Note that insertion order is not affected if a key is re-inserted into the map.
// In a nil map, all operations are no-ops.
// This is not thread safe.
type Ordered[K comparable, T any] struct {
// The keys in the order they were added.
keys []K
// The values.
values map[K]T
}
// NewOrdered creates a new Ordered map.
func NewOrdered[K comparable, T any]() *Ordered[K, T] {
return &Ordered[K, T]{values: make(map[K]T)}
}
// Set sets the value for the given key.
// Note that insertion order is not affected if a key is re-inserted into the map.
func (m *Ordered[K, T]) Set(key K, value T) {
if m == nil {
return
}
// Check if key already exists.
if _, found := m.values[key]; !found {
m.keys = append(m.keys, key)
}
m.values[key] = value
}
// Get gets the value for the given key.
func (m *Ordered[K, T]) Get(key K) (T, bool) {
if m == nil {
var v T
return v, false
}
value, found := m.values[key]
return value, found
}
// Has returns whether the given key exists in the map.
func (m *Ordered[K, T]) Has(key K) bool {
if m == nil {
return false
}
_, found := m.values[key]
return found
}
// Delete deletes the value for the given key.
func (m *Ordered[K, T]) Delete(key K) {
if m == nil {
return
}
delete(m.values, key)
for i, k := range m.keys {
if k == key {
m.keys = slices.Delete(m.keys, i, i+1)
break
}
}
}
// Clone creates a shallow copy of the map.
func (m *Ordered[K, T]) Clone() *Ordered[K, T] {
if m == nil {
return nil
}
clone := NewOrdered[K, T]()
for _, k := range m.keys {
clone.Set(k, m.values[k])
}
return clone
}
// Keys returns the keys in the order they were added.
func (m *Ordered[K, T]) Keys() []K {
if m == nil {
return nil
}
return m.keys
}
// Values returns the values in the order they were added.
func (m *Ordered[K, T]) Values() []T {
if m == nil {
return nil
}
var values []T
for _, k := range m.keys {
values = append(values, m.values[k])
}
return values
}
// Len returns the number of items in the map.
func (m *Ordered[K, T]) Len() int {
if m == nil {
return 0
}
return len(m.keys)
}
// Range calls f sequentially for each key and value present in the map.
// If f returns false, range stops the iteration.
// TODO(bep) replace with iter.Seq2 when we bump go Go 1.24.
func (m *Ordered[K, T]) Range(f func(key K, value T) bool) {
if m == nil {
return
}
for _, k := range m.keys {
if !f(k, m.values[k]) {
return
}
}
}
// Hash calculates a hash from the values.
func (m *Ordered[K, T]) Hash() (uint64, error) {
if m == nil {
return 0, nil
}
return hashing.Hash(m.values)
}

View file

@ -1,99 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.
package maps
import (
"testing"
qt "github.com/frankban/quicktest"
)
func TestOrdered(t *testing.T) {
c := qt.New(t)
m := NewOrdered[string, int]()
m.Set("a", 1)
m.Set("b", 2)
m.Set("c", 3)
c.Assert(m.Keys(), qt.DeepEquals, []string{"a", "b", "c"})
c.Assert(m.Values(), qt.DeepEquals, []int{1, 2, 3})
v, found := m.Get("b")
c.Assert(found, qt.Equals, true)
c.Assert(v, qt.Equals, 2)
m.Set("b", 22)
c.Assert(m.Keys(), qt.DeepEquals, []string{"a", "b", "c"})
c.Assert(m.Values(), qt.DeepEquals, []int{1, 22, 3})
m.Delete("b")
c.Assert(m.Keys(), qt.DeepEquals, []string{"a", "c"})
c.Assert(m.Values(), qt.DeepEquals, []int{1, 3})
}
func TestOrderedHash(t *testing.T) {
c := qt.New(t)
m := NewOrdered[string, int]()
m.Set("a", 1)
m.Set("b", 2)
m.Set("c", 3)
h1, err := m.Hash()
c.Assert(err, qt.IsNil)
m.Set("d", 4)
h2, err := m.Hash()
c.Assert(err, qt.IsNil)
c.Assert(h1, qt.Not(qt.Equals), h2)
m = NewOrdered[string, int]()
m.Set("b", 2)
m.Set("a", 1)
m.Set("c", 3)
h3, err := m.Hash()
c.Assert(err, qt.IsNil)
// Order does not matter.
c.Assert(h1, qt.Equals, h3)
}
func TestOrderedNil(t *testing.T) {
c := qt.New(t)
var m *Ordered[string, int]
m.Set("a", 1)
c.Assert(m.Keys(), qt.IsNil)
c.Assert(m.Values(), qt.IsNil)
v, found := m.Get("a")
c.Assert(found, qt.Equals, false)
c.Assert(v, qt.Equals, 0)
m.Delete("a")
var b bool
m.Range(func(k string, v int) bool {
b = true
return true
})
c.Assert(b, qt.Equals, false)
c.Assert(m.Len(), qt.Equals, 0)
c.Assert(m.Clone(), qt.IsNil)
h, err := m.Hash()
c.Assert(err, qt.IsNil)
c.Assert(h, qt.Equals, uint64(0))
}

View file

@ -303,7 +303,7 @@ func toMergeStrategy(v any) ParamsMergeStrategy {
} }
// PrepareParams // PrepareParams
// * makes all the keys in the given map lower cased and will do so recursively. // * makes all the keys in the given map lower cased and will do so
// * This will modify the map given. // * This will modify the map given.
// * Any nested map[interface{}]interface{}, map[string]interface{},map[string]string will be converted to Params. // * Any nested map[interface{}]interface{}, map[string]interface{},map[string]string will be converted to Params.
// * Any _merge value will be converted to proper type and value. // * Any _merge value will be converted to proper type and value.
@ -343,42 +343,3 @@ func PrepareParams(m Params) {
} }
} }
} }
// PrepareParamsClone is like PrepareParams, but it does not modify the input.
func PrepareParamsClone(m Params) Params {
m2 := make(Params)
for k, v := range m {
var retyped bool
lKey := strings.ToLower(k)
if lKey == MergeStrategyKey {
v = toMergeStrategy(v)
retyped = true
} else {
switch vv := v.(type) {
case map[any]any:
var p Params = cast.ToStringMap(v)
v = PrepareParamsClone(p)
retyped = true
case map[string]any:
var p Params = v.(map[string]any)
v = PrepareParamsClone(p)
retyped = true
case map[string]string:
p := make(Params)
for k, v := range vv {
p[k] = v
}
v = p
PrepareParams(p)
retyped = true
}
}
if retyped || k != lKey {
m2[lKey] = v
} else {
m2[k] = v
}
}
return m2
}
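
The master column adds PrepareParamsClone as a non-mutating sibling of PrepareParams. A minimal sketch of the difference, assuming the exported Params type from the same common/maps package:

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	in := maps.Params{
		"Title":  "Hello",
		"Nested": map[string]any{"SubKey": 1},
	}

	// PrepareParamsClone lower-cases keys (recursively, converting nested
	// maps to Params) into a new map and leaves the input untouched.
	out := maps.PrepareParamsClone(in)
	fmt.Println(out["title"], in["Title"]) // Hello Hello

	// PrepareParams performs the same conversion in place.
	maps.PrepareParams(in)
	fmt.Println(in["title"]) // Hello
}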

View file

@ -22,18 +22,31 @@ import (
"github.com/gohugoio/hugo/common/math" "github.com/gohugoio/hugo/common/math"
) )
type StoreProvider interface { // Scratch is a writable context used for stateful operations in Page/Node rendering.
// Store returns a Scratch that can be used to store temporary state.
// Store is not reset on server rebuilds.
Store() *Scratch
}
// Scratch is a writable context used for stateful build operations
type Scratch struct { type Scratch struct {
values map[string]any values map[string]any
mu sync.RWMutex mu sync.RWMutex
} }
// Scratcher provides a scratching service.
type Scratcher interface {
// Scratch returns a "scratch pad" that can be used to store state.
Scratch() *Scratch
}
type scratcher struct {
s *Scratch
}
func (s scratcher) Scratch() *Scratch {
return s.s
}
// NewScratcher creates a new Scratcher.
func NewScratcher() Scratcher {
return scratcher{s: NewScratch()}
}
// Add will, for single values, add (using the + operator) the addend to the existing addend (if found). // Add will, for single values, add (using the + operator) the addend to the existing addend (if found).
// Supports numeric values and strings. // Supports numeric values and strings.
// //

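For context on the StoreProvider/Scratcher difference above, a small sketch of the Scratch API itself. Scratch is assumed to live in the common/maps package, and Set/Get are assumed alongside the Add, SetInMap and GetSortedMapValues methods exercised in the tests that follow.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/maps"
)

func main() {
	s := maps.NewScratch()

	s.Set("greeting", "hello")
	s.Add("count", 1)
	s.Add("count", 2) // numeric values are added together
	s.SetInMap("byKey", "b", "B")
	s.SetInMap("byKey", "a", "A")

	fmt.Println(s.Get("greeting"))             // hello
	fmt.Println(s.Get("count"))                // 3
	fmt.Println(s.GetSortedMapValues("byKey")) // [A B]
}
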
View file

@ -140,7 +140,7 @@ func TestScratchInParallel(t *testing.T) {
for i := 1; i <= 10; i++ { for i := 1; i <= 10; i++ {
wg.Add(1) wg.Add(1)
go func(j int) { go func(j int) {
for k := range 10 { for k := 0; k < 10; k++ {
newVal := int64(k + j) newVal := int64(k + j)
_, err := scratch.Add(key, newVal) _, err := scratch.Add(key, newVal)
@ -185,7 +185,7 @@ func TestScratchSetInMap(t *testing.T) {
scratch.SetInMap("key", "zyx", "Zyx") scratch.SetInMap("key", "zyx", "Zyx")
scratch.SetInMap("key", "abc", "Abc (updated)") scratch.SetInMap("key", "abc", "Abc (updated)")
scratch.SetInMap("key", "def", "Def") scratch.SetInMap("key", "def", "Def")
c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, any([]any{"Abc (updated)", "Def", "Lux", "Zyx"})) c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, []any{0: "Abc (updated)", 1: "Def", 2: "Lux", 3: "Zyx"})
} }
func TestScratchDeleteInMap(t *testing.T) { func TestScratchDeleteInMap(t *testing.T) {
@ -199,7 +199,7 @@ func TestScratchDeleteInMap(t *testing.T) {
scratch.DeleteInMap("key", "abc") scratch.DeleteInMap("key", "abc")
scratch.SetInMap("key", "def", "Def") scratch.SetInMap("key", "def", "Def")
scratch.DeleteInMap("key", "lmn") // Do nothing scratch.DeleteInMap("key", "lmn") // Do nothing
c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, any([]any{"Def", "Lux", "Zyx"})) c.Assert(scratch.GetSortedMapValues("key"), qt.DeepEquals, []any{0: "Def", 1: "Lux", 2: "Zyx"})
} }
func TestScratchGetSortedMapValues(t *testing.T) { func TestScratchGetSortedMapValues(t *testing.T) {

View file

@ -26,32 +26,29 @@ func DoArithmetic(a, b any, op rune) (any, error) {
var ai, bi int64 var ai, bi int64
var af, bf float64 var af, bf float64
var au, bu uint64 var au, bu uint64
var isInt, isFloat, isUint bool
switch av.Kind() { switch av.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
ai = av.Int() ai = av.Int()
switch bv.Kind() { switch bv.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
isInt = true
bi = bv.Int() bi = bv.Int()
case reflect.Float32, reflect.Float64: case reflect.Float32, reflect.Float64:
isFloat = true
af = float64(ai) // may overflow af = float64(ai) // may overflow
ai = 0
bf = bv.Float() bf = bv.Float()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
bu = bv.Uint() bu = bv.Uint()
if ai >= 0 { if ai >= 0 {
isUint = true
au = uint64(ai) au = uint64(ai)
ai = 0
} else { } else {
isInt = true
bi = int64(bu) // may overflow bi = int64(bu) // may overflow
bu = 0
} }
default: default:
return nil, errors.New("can't apply the operator to the values") return nil, errors.New("can't apply the operator to the values")
} }
case reflect.Float32, reflect.Float64: case reflect.Float32, reflect.Float64:
isFloat = true
af = av.Float() af = av.Float()
switch bv.Kind() { switch bv.Kind() {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
@ -69,18 +66,17 @@ func DoArithmetic(a, b any, op rune) (any, error) {
case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64: case reflect.Int, reflect.Int8, reflect.Int16, reflect.Int32, reflect.Int64:
bi = bv.Int() bi = bv.Int()
if bi >= 0 { if bi >= 0 {
isUint = true
bu = uint64(bi) bu = uint64(bi)
bi = 0
} else { } else {
isInt = true
ai = int64(au) // may overflow ai = int64(au) // may overflow
au = 0
} }
case reflect.Float32, reflect.Float64: case reflect.Float32, reflect.Float64:
isFloat = true
af = float64(au) // may overflow af = float64(au) // may overflow
au = 0
bf = bv.Float() bf = bv.Float()
case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64: case reflect.Uint, reflect.Uint8, reflect.Uint16, reflect.Uint32, reflect.Uint64:
isUint = true
bu = bv.Uint() bu = bv.Uint()
default: default:
return nil, errors.New("can't apply the operator to the values") return nil, errors.New("can't apply the operator to the values")
@ -98,32 +94,38 @@ func DoArithmetic(a, b any, op rune) (any, error) {
switch op { switch op {
case '+': case '+':
if isInt { if ai != 0 || bi != 0 {
return ai + bi, nil return ai + bi, nil
} else if isFloat { } else if af != 0 || bf != 0 {
return af + bf, nil return af + bf, nil
} } else if au != 0 || bu != 0 {
return au + bu, nil return au + bu, nil
}
return 0, nil
case '-': case '-':
if isInt { if ai != 0 || bi != 0 {
return ai - bi, nil return ai - bi, nil
} else if isFloat { } else if af != 0 || bf != 0 {
return af - bf, nil return af - bf, nil
} } else if au != 0 || bu != 0 {
return au - bu, nil return au - bu, nil
case '*':
if isInt {
return ai * bi, nil
} else if isFloat {
return af * bf, nil
} }
return 0, nil
case '*':
if ai != 0 || bi != 0 {
return ai * bi, nil
} else if af != 0 || bf != 0 {
return af * bf, nil
} else if au != 0 || bu != 0 {
return au * bu, nil return au * bu, nil
}
return 0, nil
case '/': case '/':
if isInt && bi != 0 { if bi != 0 {
return ai / bi, nil return ai / bi, nil
} else if isFloat && bf != 0 { } else if bf != 0 {
return af / bf, nil return af / bf, nil
} else if isUint && bu != 0 { } else if bu != 0 {
return au / bu, nil return au / bu, nil
} }
return nil, errors.New("can't divide the value by 0") return nil, errors.New("can't divide the value by 0")

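The flag-based dispatch in the master column fixes a zero-value gap in the older code: with checks like ai != 0 || bi != 0, an expression such as 0 + 0 fell through every typed branch and hit the untyped return 0, nil fallback. A short sketch against the exported function (its import path appears in the Scratch file above):

package main

import (
	"fmt"

	hmath "github.com/gohugoio/hugo/common/math"
)

func main() {
	// On master this yields a typed int64(0); on v0.137.1 the '+' case
	// only reached its final "return 0, nil", producing a plain int.
	// The added {0, 0, '+', int64(0)} test cases above pin this down.
	sum, err := hmath.DoArithmetic(0, 0, '+')
	fmt.Println(sum, err) // 0 <nil>

	// Mixed int/float operands are still promoted to float64.
	prod, err := hmath.DoArithmetic(3, 2.0, '*')
	fmt.Println(prod, err) // 6 <nil>
}
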
View file

@ -30,12 +30,10 @@ func TestDoArithmetic(t *testing.T) {
expect any expect any
}{ }{
{3, 2, '+', int64(5)}, {3, 2, '+', int64(5)},
{0, 0, '+', int64(0)},
{3, 2, '-', int64(1)}, {3, 2, '-', int64(1)},
{3, 2, '*', int64(6)}, {3, 2, '*', int64(6)},
{3, 2, '/', int64(1)}, {3, 2, '/', int64(1)},
{3.0, 2, '+', float64(5)}, {3.0, 2, '+', float64(5)},
{0.0, 0, '+', float64(0.0)},
{3.0, 2, '-', float64(1)}, {3.0, 2, '-', float64(1)},
{3.0, 2, '*', float64(6)}, {3.0, 2, '*', float64(6)},
{3.0, 2, '/', float64(1.5)}, {3.0, 2, '/', float64(1.5)},
@ -44,22 +42,18 @@ func TestDoArithmetic(t *testing.T) {
{3, 2.0, '*', float64(6)}, {3, 2.0, '*', float64(6)},
{3, 2.0, '/', float64(1.5)}, {3, 2.0, '/', float64(1.5)},
{3.0, 2.0, '+', float64(5)}, {3.0, 2.0, '+', float64(5)},
{0.0, 0.0, '+', float64(0.0)},
{3.0, 2.0, '-', float64(1)}, {3.0, 2.0, '-', float64(1)},
{3.0, 2.0, '*', float64(6)}, {3.0, 2.0, '*', float64(6)},
{3.0, 2.0, '/', float64(1.5)}, {3.0, 2.0, '/', float64(1.5)},
{uint(3), uint(2), '+', uint64(5)}, {uint(3), uint(2), '+', uint64(5)},
{uint(0), uint(0), '+', uint64(0)},
{uint(3), uint(2), '-', uint64(1)}, {uint(3), uint(2), '-', uint64(1)},
{uint(3), uint(2), '*', uint64(6)}, {uint(3), uint(2), '*', uint64(6)},
{uint(3), uint(2), '/', uint64(1)}, {uint(3), uint(2), '/', uint64(1)},
{uint(3), 2, '+', uint64(5)}, {uint(3), 2, '+', uint64(5)},
{uint(0), 0, '+', uint64(0)},
{uint(3), 2, '-', uint64(1)}, {uint(3), 2, '-', uint64(1)},
{uint(3), 2, '*', uint64(6)}, {uint(3), 2, '*', uint64(6)},
{uint(3), 2, '/', uint64(1)}, {uint(3), 2, '/', uint64(1)},
{3, uint(2), '+', uint64(5)}, {3, uint(2), '+', uint64(5)},
{0, uint(0), '+', uint64(0)},
{3, uint(2), '-', uint64(1)}, {3, uint(2), '-', uint64(1)},
{3, uint(2), '*', uint64(6)}, {3, uint(2), '*', uint64(6)},
{3, uint(2), '/', uint64(1)}, {3, uint(2), '/', uint64(1)},
@ -72,15 +66,16 @@ func TestDoArithmetic(t *testing.T) {
{-3, uint(2), '*', int64(-6)}, {-3, uint(2), '*', int64(-6)},
{-3, uint(2), '/', int64(-1)}, {-3, uint(2), '/', int64(-1)},
{uint(3), 2.0, '+', float64(5)}, {uint(3), 2.0, '+', float64(5)},
{uint(0), 0.0, '+', float64(0)},
{uint(3), 2.0, '-', float64(1)}, {uint(3), 2.0, '-', float64(1)},
{uint(3), 2.0, '*', float64(6)}, {uint(3), 2.0, '*', float64(6)},
{uint(3), 2.0, '/', float64(1.5)}, {uint(3), 2.0, '/', float64(1.5)},
{3.0, uint(2), '+', float64(5)}, {3.0, uint(2), '+', float64(5)},
{0.0, uint(0), '+', float64(0)},
{3.0, uint(2), '-', float64(1)}, {3.0, uint(2), '-', float64(1)},
{3.0, uint(2), '*', float64(6)}, {3.0, uint(2), '*', float64(6)},
{3.0, uint(2), '/', float64(1.5)}, {3.0, uint(2), '/', float64(1.5)},
{0, 0, '+', 0},
{0, 0, '-', 0},
{0, 0, '*', 0},
{"foo", "bar", '+', "foobar"}, {"foo", "bar", '+', "foobar"},
{3, 0, '/', false}, {3, 0, '/', false},
{3.0, 0, '/', false}, {3.0, 0, '/', false},

View file

@ -42,7 +42,7 @@ func TestPara(t *testing.T) {
c.Run("Order", func(c *qt.C) { c.Run("Order", func(c *qt.C) {
n := 500 n := 500
ints := make([]int, n) ints := make([]int, n)
for i := range n { for i := 0; i < n; i++ {
ints[i] = i ints[i] = i
} }
@ -51,7 +51,7 @@ func TestPara(t *testing.T) {
var result []int var result []int
var mu sync.Mutex var mu sync.Mutex
for i := range n { for i := 0; i < n; i++ {
i := i i := i
r.Run(func() error { r.Run(func() error {
mu.Lock() mu.Lock()
@ -78,7 +78,7 @@ func TestPara(t *testing.T) {
var counter int64 var counter int64
for range n { for i := 0; i < n; i++ {
r.Run(func() error { r.Run(func() error {
atomic.AddInt64(&counter, 1) atomic.AddInt64(&counter, 1)
time.Sleep(1 * time.Millisecond) time.Sleep(1 * time.Millisecond)

View file

@ -23,11 +23,6 @@ import (
"github.com/gohugoio/hugo/common/types" "github.com/gohugoio/hugo/common/types"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/identity" "github.com/gohugoio/hugo/identity"
"github.com/gohugoio/hugo/resources/kinds"
)
const (
identifierBaseof = "baseof"
) )
// PathParser parses a path into a Path. // PathParser parses a path into a Path.
@ -38,10 +33,6 @@ type PathParser struct {
// Reports whether the given language is disabled. // Reports whether the given language is disabled.
IsLangDisabled func(string) bool IsLangDisabled func(string) bool
// IsOutputFormat reports whether the given name is a valid output format.
// The second argument is optional.
IsOutputFormat func(name, ext string) bool
// Reports whether the given ext is a content file. // Reports whether the given ext is a content file.
IsContentExt func(string) bool IsContentExt func(string) bool
} }
@ -92,10 +83,13 @@ func (pp *PathParser) Parse(c, s string) *Path {
} }
func (pp *PathParser) newPath(component string) *Path { func (pp *PathParser) newPath(component string) *Path {
p := &Path{} return &Path{
p.reset() component: component,
p.component = component posContainerLow: -1,
return p posContainerHigh: -1,
posSectionHigh: -1,
posIdentifierLanguage: -1,
}
} }
func (pp *PathParser) parse(component, s string) (*Path, error) { func (pp *PathParser) parse(component, s string) (*Path, error) {
@ -120,101 +114,10 @@ func (pp *PathParser) parse(component, s string) (*Path, error) {
return p, nil return p, nil
} }
func (pp *PathParser) parseIdentifier(component, s string, p *Path, i, lastDot, numDots int, isLast bool) {
if p.posContainerHigh != -1 {
return
}
mayHaveLang := numDots > 1 && p.posIdentifierLanguage == -1 && pp.LanguageIndex != nil
mayHaveLang = mayHaveLang && (component == files.ComponentFolderContent || component == files.ComponentFolderLayouts)
mayHaveOutputFormat := component == files.ComponentFolderLayouts
mayHaveKind := p.posIdentifierKind == -1 && mayHaveOutputFormat
var mayHaveLayout bool
if p.pathType == TypeShortcode {
mayHaveLayout = !isLast && component == files.ComponentFolderLayouts
} else {
mayHaveLayout = component == files.ComponentFolderLayouts
}
var found bool
var high int
if len(p.identifiersKnown) > 0 {
high = lastDot
} else {
high = len(p.s)
}
id := types.LowHigh[string]{Low: i + 1, High: high}
sid := p.s[id.Low:id.High]
if len(p.identifiersKnown) == 0 {
// The first is always the extension.
p.identifiersKnown = append(p.identifiersKnown, id)
found = true
// May also be the output format.
if mayHaveOutputFormat && pp.IsOutputFormat(sid, "") {
p.posIdentifierOutputFormat = 0
}
} else {
var langFound bool
if mayHaveLang {
var disabled bool
_, langFound = pp.LanguageIndex[sid]
if !langFound {
disabled = pp.IsLangDisabled != nil && pp.IsLangDisabled(sid)
if disabled {
p.disabled = true
langFound = true
}
}
found = langFound
if langFound {
p.identifiersKnown = append(p.identifiersKnown, id)
p.posIdentifierLanguage = len(p.identifiersKnown) - 1
}
}
if !found && mayHaveOutputFormat {
// At this point we may already have resolved an output format,
// but we need to keep looking for a more specific one, e.g. amp before html.
// Use both name and extension to prevent
// false positives on the form css.html.
if pp.IsOutputFormat(sid, p.Ext()) {
found = true
p.identifiersKnown = append(p.identifiersKnown, id)
p.posIdentifierOutputFormat = len(p.identifiersKnown) - 1
}
}
if !found && mayHaveKind {
if kinds.GetKindMain(sid) != "" {
found = true
p.identifiersKnown = append(p.identifiersKnown, id)
p.posIdentifierKind = len(p.identifiersKnown) - 1
}
}
if !found && sid == identifierBaseof {
found = true
p.identifiersKnown = append(p.identifiersKnown, id)
p.posIdentifierBaseof = len(p.identifiersKnown) - 1
}
if !found && mayHaveLayout {
p.identifiersKnown = append(p.identifiersKnown, id)
p.posIdentifierLayout = len(p.identifiersKnown) - 1
found = true
}
if !found {
p.identifiersUnknown = append(p.identifiersUnknown, id)
}
}
}
func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) { func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
hasLang := pp.LanguageIndex != nil
hasLang = hasLang && (component == files.ComponentFolderContent || component == files.ComponentFolderLayouts)
if runtime.GOOS == "windows" { if runtime.GOOS == "windows" {
s = path.Clean(filepath.ToSlash(s)) s = path.Clean(filepath.ToSlash(s))
if s == "." { if s == "." {
@ -237,26 +140,46 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
p.s = s p.s = s
slashCount := 0 slashCount := 0
lastDot := 0
lastSlashIdx := strings.LastIndex(s, "/")
numDots := strings.Count(s[lastSlashIdx+1:], ".")
if strings.Contains(s, "/_shortcodes/") {
p.pathType = TypeShortcode
}
for i := len(s) - 1; i >= 0; i-- { for i := len(s) - 1; i >= 0; i-- {
c := s[i] c := s[i]
switch c { switch c {
case '.': case '.':
pp.parseIdentifier(component, s, p, i, lastDot, numDots, false) if p.posContainerHigh == -1 {
lastDot = i var high int
if len(p.identifiers) > 0 {
high = p.identifiers[len(p.identifiers)-1].Low - 1
} else {
high = len(p.s)
}
id := types.LowHigh[string]{Low: i + 1, High: high}
if len(p.identifiers) == 0 {
p.identifiers = append(p.identifiers, id)
} else if len(p.identifiers) == 1 {
// Check for a valid language.
s := p.s[id.Low:id.High]
if hasLang {
var disabled bool
_, langFound := pp.LanguageIndex[s]
if !langFound {
disabled = pp.IsLangDisabled != nil && pp.IsLangDisabled(s)
if disabled {
p.disabled = true
langFound = true
}
}
if langFound {
p.posIdentifierLanguage = 1
p.identifiers = append(p.identifiers, id)
}
}
}
}
case '/': case '/':
slashCount++ slashCount++
if p.posContainerHigh == -1 { if p.posContainerHigh == -1 {
if lastDot > 0 {
pp.parseIdentifier(component, s, p, i, lastDot, numDots, true)
}
p.posContainerHigh = i + 1 p.posContainerHigh = i + 1
} else if p.posContainerLow == -1 { } else if p.posContainerLow == -1 {
p.posContainerLow = i + 1 p.posContainerLow = i + 1
@ -267,52 +190,26 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
} }
} }
if len(p.identifiersKnown) > 0 { if len(p.identifiers) > 0 {
isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes isContentComponent := p.component == files.ComponentFolderContent || p.component == files.ComponentFolderArchetypes
isContent := isContentComponent && pp.IsContentExt(p.Ext()) isContent := isContentComponent && pp.IsContentExt(p.Ext())
id := p.identifiersKnown[len(p.identifiersKnown)-1] id := p.identifiers[len(p.identifiers)-1]
if id.Low > p.posContainerHigh {
b := p.s[p.posContainerHigh : id.Low-1] b := p.s[p.posContainerHigh : id.Low-1]
if isContent { if isContent {
switch b { switch b {
case "index": case "index":
p.pathType = TypeLeaf p.bundleType = PathTypeLeaf
case "_index": case "_index":
p.pathType = TypeBranch p.bundleType = PathTypeBranch
default: default:
p.pathType = TypeContentSingle p.bundleType = PathTypeContentSingle
} }
if slashCount == 2 && p.IsLeafBundle() { if slashCount == 2 && p.IsLeafBundle() {
p.posSectionHigh = 0 p.posSectionHigh = 0
} }
} else if b == files.NameContentData && files.IsContentDataExt(p.Ext()) { } else if b == files.NameContentData && files.IsContentDataExt(p.Ext()) {
p.pathType = TypeContentData p.bundleType = PathTypeContentData
}
}
}
if p.pathType < TypeMarkup && component == files.ComponentFolderLayouts {
if p.posIdentifierBaseof != -1 {
p.pathType = TypeBaseof
} else {
pth := p.Path()
if strings.Contains(pth, "/_shortcodes/") {
p.pathType = TypeShortcode
} else if strings.Contains(pth, "/_markup/") {
p.pathType = TypeMarkup
} else if strings.HasPrefix(pth, "/_partials/") {
p.pathType = TypePartial
}
}
}
if p.pathType == TypeShortcode && p.posIdentifierLayout != -1 {
id := p.identifiersKnown[p.posIdentifierLayout]
if id.Low == p.posContainerHigh {
// First identifier is shortcode name.
p.posIdentifierLayout = -1
} }
} }
@ -321,44 +218,35 @@ func (pp *PathParser) doParse(component, s string, p *Path) (*Path, error) {
func ModifyPathBundleTypeResource(p *Path) { func ModifyPathBundleTypeResource(p *Path) {
if p.IsContent() { if p.IsContent() {
p.pathType = TypeContentResource p.bundleType = PathTypeContentResource
} else { } else {
p.pathType = TypeFile p.bundleType = PathTypeFile
} }
} }
//go:generate stringer -type Type type PathType int
type Type int
const ( const (
// A generic resource, e.g. a JSON file. // A generic resource, e.g. a JSON file.
TypeFile Type = iota PathTypeFile PathType = iota
// All below are content files. // All below are content files.
// A resource of a content type with front matter. // A resource of a content type with front matter.
TypeContentResource PathTypeContentResource
// E.g. /blog/my-post.md // E.g. /blog/my-post.md
TypeContentSingle PathTypeContentSingle
// All below are bundled content files. // All below are bundled content files.
// Leaf bundles, e.g. /blog/my-post/index.md // Leaf bundles, e.g. /blog/my-post/index.md
TypeLeaf PathTypeLeaf
// Branch bundles, e.g. /blog/_index.md // Branch bundles, e.g. /blog/_index.md
TypeBranch PathTypeBranch
// Content data file, _content.gotmpl. // Content data file, _content.gotmpl.
TypeContentData PathTypeContentData
// Layout types.
TypeMarkup
TypeShortcode
TypePartial
TypeBaseof
) )
type Path struct { type Path struct {
@ -370,16 +258,11 @@ type Path struct {
posSectionHigh int posSectionHigh int
component string component string
pathType Type bundleType PathType
identifiersKnown []types.LowHigh[string] identifiers []types.LowHigh[string]
identifiersUnknown []types.LowHigh[string]
posIdentifierLanguage int posIdentifierLanguage int
posIdentifierOutputFormat int
posIdentifierKind int
posIdentifierLayout int
posIdentifierBaseof int
disabled bool disabled bool
trimLeadingSlash bool trimLeadingSlash bool
@ -410,13 +293,9 @@ func (p *Path) reset() {
p.posContainerHigh = -1 p.posContainerHigh = -1
p.posSectionHigh = -1 p.posSectionHigh = -1
p.component = "" p.component = ""
p.pathType = 0 p.bundleType = 0
p.identifiersKnown = p.identifiersKnown[:0] p.identifiers = p.identifiers[:0]
p.posIdentifierLanguage = -1 p.posIdentifierLanguage = -1
p.posIdentifierOutputFormat = -1
p.posIdentifierKind = -1
p.posIdentifierLayout = -1
p.posIdentifierBaseof = -1
p.disabled = false p.disabled = false
p.trimLeadingSlash = false p.trimLeadingSlash = false
p.unnormalized = nil p.unnormalized = nil
@ -437,9 +316,6 @@ func (p *Path) norm(s string) string {
// IdentifierBase satisfies identity.Identity. // IdentifierBase satisfies identity.Identity.
func (p *Path) IdentifierBase() string { func (p *Path) IdentifierBase() string {
if p.Component() == files.ComponentFolderLayouts {
return p.Path()
}
return p.Base() return p.Base()
} }
@ -456,13 +332,6 @@ func (p *Path) Container() string {
return p.norm(p.s[p.posContainerLow : p.posContainerHigh-1]) return p.norm(p.s[p.posContainerLow : p.posContainerHigh-1])
} }
func (p *Path) String() string {
if p == nil {
return "<nil>"
}
return p.Path()
}
// ContainerDir returns the container directory for this path. // ContainerDir returns the container directory for this path.
// For content bundles this will be the parent directory. // For content bundles this will be the parent directory.
func (p *Path) ContainerDir() string { func (p *Path) ContainerDir() string {
@ -483,13 +352,13 @@ func (p *Path) Section() string {
// IsContent returns true if the path is a content file (e.g. mypost.md). // IsContent returns true if the path is a content file (e.g. mypost.md).
// Note that this will also return true for content files in a bundle. // Note that this will also return true for content files in a bundle.
func (p *Path) IsContent() bool { func (p *Path) IsContent() bool {
return p.Type() >= TypeContentResource && p.Type() <= TypeContentData return p.BundleType() >= PathTypeContentResource
} }
// isContentPage returns true if the path is a content file (e.g. mypost.md), // isContentPage returns true if the path is a content file (e.g. mypost.md),
// but nof if inside a leaf bundle. // but nof if inside a leaf bundle.
func (p *Path) isContentPage() bool { func (p *Path) isContentPage() bool {
return p.Type() >= TypeContentSingle && p.Type() <= TypeContentData return p.BundleType() >= PathTypeContentSingle
} }
// Name returns the last element of path. // Name returns the last element of path.
@ -503,7 +372,7 @@ func (p *Path) Name() string {
// Name returns the last element of path without any extension. // Name returns the last element of path without any extension.
func (p *Path) NameNoExt() string { func (p *Path) NameNoExt() string {
if i := p.identifierIndex(0); i != -1 { if i := p.identifierIndex(0); i != -1 {
return p.s[p.posContainerHigh : p.identifiersKnown[i].Low-1] return p.s[p.posContainerHigh : p.identifiers[i].Low-1]
} }
return p.s[p.posContainerHigh:] return p.s[p.posContainerHigh:]
} }
@ -515,7 +384,7 @@ func (p *Path) NameNoLang() string {
return p.Name() return p.Name()
} }
return p.s[p.posContainerHigh:p.identifiersKnown[i].Low-1] + p.s[p.identifiersKnown[i].High:] return p.s[p.posContainerHigh:p.identifiers[i].Low-1] + p.s[p.identifiers[i].High:]
} }
// BaseNameNoIdentifier returns the logical base name for a resource without any identifier (e.g. no extension). // BaseNameNoIdentifier returns the logical base name for a resource without any identifier (e.g. no extension).
@ -529,26 +398,10 @@ func (p *Path) BaseNameNoIdentifier() string {
// NameNoIdentifier returns the last element of path without any identifier (e.g. no extension). // NameNoIdentifier returns the last element of path without any identifier (e.g. no extension).
func (p *Path) NameNoIdentifier() string { func (p *Path) NameNoIdentifier() string {
lowHigh := p.nameLowHigh() if len(p.identifiers) > 0 {
return p.s[lowHigh.Low:lowHigh.High] return p.s[p.posContainerHigh : p.identifiers[len(p.identifiers)-1].Low-1]
}
func (p *Path) nameLowHigh() types.LowHigh[string] {
if len(p.identifiersKnown) > 0 {
lastID := p.identifiersKnown[len(p.identifiersKnown)-1]
if p.posContainerHigh == lastID.Low {
// The last identifier is the name.
return lastID
}
return types.LowHigh[string]{
Low: p.posContainerHigh,
High: p.identifiersKnown[len(p.identifiersKnown)-1].Low - 1,
}
}
return types.LowHigh[string]{
Low: p.posContainerHigh,
High: len(p.s),
} }
return p.s[p.posContainerHigh:]
} }
// Dir returns all but the last element of path, typically the path's directory. // Dir returns all but the last element of path, typically the path's directory.
@ -568,11 +421,6 @@ func (p *Path) Path() (d string) {
return p.norm(p.s) return p.norm(p.s)
} }
// PathNoLeadingSlash returns the full path without the leading slash.
func (p *Path) PathNoLeadingSlash() string {
return p.Path()[1:]
}
// Unnormalized returns the Path with the original case preserved. // Unnormalized returns the Path with the original case preserved.
func (p *Path) Unnormalized() *Path { func (p *Path) Unnormalized() *Path {
return p.unnormalized return p.unnormalized
@ -588,28 +436,6 @@ func (p *Path) PathNoIdentifier() string {
return p.base(false, false) return p.base(false, false)
} }
// PathBeforeLangAndOutputFormatAndExt returns the path up to the first identifier that is not a language or output format.
func (p *Path) PathBeforeLangAndOutputFormatAndExt() string {
if len(p.identifiersKnown) == 0 {
return p.norm(p.s)
}
i := p.identifierIndex(0)
if j := p.posIdentifierOutputFormat; i == -1 || (j != -1 && j < i) {
i = j
}
if j := p.posIdentifierLanguage; i == -1 || (j != -1 && j < i) {
i = j
}
if i == -1 {
return p.norm(p.s)
}
id := p.identifiersKnown[i]
return p.norm(p.s[:id.Low-1])
}
// PathRel returns the path relative to the given owner. // PathRel returns the path relative to the given owner.
func (p *Path) PathRel(owner *Path) string { func (p *Path) PathRel(owner *Path) string {
ob := owner.Base() ob := owner.Base()
@ -636,42 +462,26 @@ func (p *Path) Base() string {
return p.base(!p.isContentPage(), p.IsBundle()) return p.base(!p.isContentPage(), p.IsBundle())
} }
// Used in template lookups.
// For pages with Type set, we treat that as the section.
func (p *Path) BaseReTyped(typ string) (d string) {
base := p.Base()
if typ == "" || p.Section() == typ {
return base
}
d = "/" + typ
if p.posSectionHigh != -1 {
d += base[p.posSectionHigh:]
}
d = p.norm(d)
return
}
// BaseNoLeadingSlash returns the base path without the leading slash. // BaseNoLeadingSlash returns the base path without the leading slash.
func (p *Path) BaseNoLeadingSlash() string { func (p *Path) BaseNoLeadingSlash() string {
return p.Base()[1:] return p.Base()[1:]
} }
func (p *Path) base(preserveExt, isBundle bool) string { func (p *Path) base(preserveExt, isBundle bool) string {
if len(p.identifiersKnown) == 0 { if len(p.identifiers) == 0 {
return p.norm(p.s) return p.norm(p.s)
} }
if preserveExt && len(p.identifiersKnown) == 1 { if preserveExt && len(p.identifiers) == 1 {
// Preserve extension. // Preserve extension.
return p.norm(p.s) return p.norm(p.s)
} }
var high int id := p.identifiers[len(p.identifiers)-1]
high := id.Low - 1
if isBundle { if isBundle {
high = p.posContainerHigh - 1 high = p.posContainerHigh - 1
} else {
high = p.nameLowHigh().High
} }
if high == 0 { if high == 0 {
@ -683,7 +493,7 @@ func (p *Path) base(preserveExt, isBundle bool) string {
} }
// For txt files etc. we want to preserve the extension. // For txt files etc. we want to preserve the extension.
id := p.identifiersKnown[0] id = p.identifiers[0]
return p.norm(p.s[:high] + p.s[id.Low-1:id.High]) return p.norm(p.s[:high] + p.s[id.Low-1:id.High])
} }
@ -692,20 +502,8 @@ func (p *Path) Ext() string {
return p.identifierAsString(0) return p.identifierAsString(0)
} }
func (p *Path) OutputFormat() string {
return p.identifierAsString(p.posIdentifierOutputFormat)
}
func (p *Path) Kind() string {
return p.identifierAsString(p.posIdentifierKind)
}
func (p *Path) Layout() string {
return p.identifierAsString(p.posIdentifierLayout)
}
func (p *Path) Lang() string { func (p *Path) Lang() string {
return p.identifierAsString(p.posIdentifierLanguage) return p.identifierAsString(1)
} }
func (p *Path) Identifier(i int) string { func (p *Path) Identifier(i int) string {
@ -717,43 +515,35 @@ func (p *Path) Disabled() bool {
} }
func (p *Path) Identifiers() []string { func (p *Path) Identifiers() []string {
ids := make([]string, len(p.identifiersKnown)) ids := make([]string, len(p.identifiers))
for i, id := range p.identifiersKnown { for i, id := range p.identifiers {
ids[i] = p.s[id.Low:id.High] ids[i] = p.s[id.Low:id.High]
} }
return ids return ids
} }
func (p *Path) IdentifiersUnknown() []string { func (p *Path) BundleType() PathType {
ids := make([]string, len(p.identifiersUnknown)) return p.bundleType
for i, id := range p.identifiersUnknown {
ids[i] = p.s[id.Low:id.High]
}
return ids
}
func (p *Path) Type() Type {
return p.pathType
} }
func (p *Path) IsBundle() bool { func (p *Path) IsBundle() bool {
return p.pathType >= TypeLeaf && p.pathType <= TypeContentData return p.bundleType >= PathTypeLeaf
} }
func (p *Path) IsBranchBundle() bool { func (p *Path) IsBranchBundle() bool {
return p.pathType == TypeBranch return p.bundleType == PathTypeBranch
} }
func (p *Path) IsLeafBundle() bool { func (p *Path) IsLeafBundle() bool {
return p.pathType == TypeLeaf return p.bundleType == PathTypeLeaf
} }
func (p *Path) IsContentData() bool { func (p *Path) IsContentData() bool {
return p.pathType == TypeContentData return p.bundleType == PathTypeContentData
} }
func (p Path) ForType(t Type) *Path { func (p Path) ForBundleType(t PathType) *Path {
p.pathType = t p.bundleType = t
return &p return &p
} }
@ -763,12 +553,12 @@ func (p *Path) identifierAsString(i int) string {
return "" return ""
} }
id := p.identifiersKnown[i] id := p.identifiers[i]
return p.s[id.Low:id.High] return p.s[id.Low:id.High]
} }
func (p *Path) identifierIndex(i int) int { func (p *Path) identifierIndex(i int) int {
if i < 0 || i >= len(p.identifiersKnown) { if i < 0 || i >= len(p.identifiers) {
return -1 return -1
} }
return i return i

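To make the identifier bookkeeping above concrete, a hedged sketch of parsing a translated leaf-bundle path with a minimally configured parser, mirroring the testParser defined in the test file that follows. The import path common/paths is assumed; the IsOutputFormat field exists on the master side only.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/paths"
	"github.com/gohugoio/hugo/hugofs/files"
)

func main() {
	pp := &paths.PathParser{
		LanguageIndex: map[string]int{"no": 0, "en": 1},
		IsContentExt:  func(ext string) bool { return ext == "md" },
		// Master-side field; drop this when building against v0.137.1.
		IsOutputFormat: func(name, ext string) bool { return name == "html" },
	}

	p := pp.Parse(files.ComponentFolderContent, "/blog/my-post/index.no.md")

	fmt.Println(p.Ext())          // md
	fmt.Println(p.Lang())         // no
	fmt.Println(p.Base())         // /blog/my-post
	fmt.Println(p.IsLeafBundle()) // true
}
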
View file

@ -18,7 +18,6 @@ import (
"testing" "testing"
"github.com/gohugoio/hugo/hugofs/files" "github.com/gohugoio/hugo/hugofs/files"
"github.com/gohugoio/hugo/resources/kinds"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
) )
@ -27,18 +26,10 @@ var testParser = &PathParser{
LanguageIndex: map[string]int{ LanguageIndex: map[string]int{
"no": 0, "no": 0,
"en": 1, "en": 1,
"fr": 2,
}, },
IsContentExt: func(ext string) bool { IsContentExt: func(ext string) bool {
return ext == "md" return ext == "md"
}, },
IsOutputFormat: func(name, ext string) bool {
switch name {
case "html", "amp", "csv", "rss":
return true
}
return false
},
} }
func TestParse(t *testing.T) { func TestParse(t *testing.T) {
@ -114,19 +105,17 @@ func TestParse(t *testing.T) {
"Basic Markdown file", "Basic Markdown file",
"/a/b/c.md", "/a/b/c.md",
func(c *qt.C, p *Path) { func(c *qt.C, p *Path) {
c.Assert(p.Ext(), qt.Equals, "md")
c.Assert(p.Type(), qt.Equals, TypeContentSingle)
c.Assert(p.IsContent(), qt.IsTrue) c.Assert(p.IsContent(), qt.IsTrue)
c.Assert(p.IsLeafBundle(), qt.IsFalse) c.Assert(p.IsLeafBundle(), qt.IsFalse)
c.Assert(p.Name(), qt.Equals, "c.md") c.Assert(p.Name(), qt.Equals, "c.md")
c.Assert(p.Base(), qt.Equals, "/a/b/c") c.Assert(p.Base(), qt.Equals, "/a/b/c")
c.Assert(p.BaseReTyped("foo"), qt.Equals, "/foo/b/c")
c.Assert(p.Section(), qt.Equals, "a") c.Assert(p.Section(), qt.Equals, "a")
c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "c") c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "c")
c.Assert(p.Path(), qt.Equals, "/a/b/c.md") c.Assert(p.Path(), qt.Equals, "/a/b/c.md")
c.Assert(p.Dir(), qt.Equals, "/a/b") c.Assert(p.Dir(), qt.Equals, "/a/b")
c.Assert(p.Container(), qt.Equals, "b") c.Assert(p.Container(), qt.Equals, "b")
c.Assert(p.ContainerDir(), qt.Equals, "/a/b") c.Assert(p.ContainerDir(), qt.Equals, "/a/b")
c.Assert(p.Ext(), qt.Equals, "md")
}, },
}, },
{ {
@ -141,7 +130,7 @@ func TestParse(t *testing.T) {
// Reclassify it as a content resource. // Reclassify it as a content resource.
ModifyPathBundleTypeResource(p) ModifyPathBundleTypeResource(p)
c.Assert(p.Type(), qt.Equals, TypeContentResource) c.Assert(p.BundleType(), qt.Equals, PathTypeContentResource)
c.Assert(p.IsContent(), qt.IsTrue) c.Assert(p.IsContent(), qt.IsTrue)
c.Assert(p.Name(), qt.Equals, "b.md") c.Assert(p.Name(), qt.Equals, "b.md")
c.Assert(p.Base(), qt.Equals, "/a/b.md") c.Assert(p.Base(), qt.Equals, "/a/b.md")
@ -174,10 +163,8 @@ func TestParse(t *testing.T) {
c.Assert(p.NameNoIdentifier(), qt.Equals, "b.a.b") c.Assert(p.NameNoIdentifier(), qt.Equals, "b.a.b")
c.Assert(p.NameNoLang(), qt.Equals, "b.a.b.txt") c.Assert(p.NameNoLang(), qt.Equals, "b.a.b.txt")
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"}) c.Assert(p.Identifiers(), qt.DeepEquals, []string{"txt", "no"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{"b", "a", "b"})
c.Assert(p.Base(), qt.Equals, "/a/b.a.b.txt") c.Assert(p.Base(), qt.Equals, "/a/b.a.b.txt")
c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b.a.b.txt") c.Assert(p.BaseNoLeadingSlash(), qt.Equals, "a/b.a.b.txt")
c.Assert(p.Path(), qt.Equals, "/a/b.a.b.no.txt")
c.Assert(p.PathNoLang(), qt.Equals, "/a/b.a.b.txt") c.Assert(p.PathNoLang(), qt.Equals, "/a/b.a.b.txt")
c.Assert(p.Ext(), qt.Equals, "txt") c.Assert(p.Ext(), qt.Equals, "txt")
c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b.a.b") c.Assert(p.PathNoIdentifier(), qt.Equals, "/a/b.a.b")
@ -187,11 +174,7 @@ func TestParse(t *testing.T) {
"Home branch cundle", "Home branch cundle",
"/_index.md", "/_index.md",
func(c *qt.C, p *Path) { func(c *qt.C, p *Path) {
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"})
c.Assert(p.IsBranchBundle(), qt.IsTrue)
c.Assert(p.IsBundle(), qt.IsTrue)
c.Assert(p.Base(), qt.Equals, "/") c.Assert(p.Base(), qt.Equals, "/")
c.Assert(p.BaseReTyped("foo"), qt.Equals, "/foo")
c.Assert(p.Path(), qt.Equals, "/_index.md") c.Assert(p.Path(), qt.Equals, "/_index.md")
c.Assert(p.Container(), qt.Equals, "") c.Assert(p.Container(), qt.Equals, "")
c.Assert(p.ContainerDir(), qt.Equals, "/") c.Assert(p.ContainerDir(), qt.Equals, "/")
@ -202,14 +185,12 @@ func TestParse(t *testing.T) {
"/a/index.md", "/a/index.md",
func(c *qt.C, p *Path) { func(c *qt.C, p *Path) {
c.Assert(p.Base(), qt.Equals, "/a") c.Assert(p.Base(), qt.Equals, "/a")
c.Assert(p.BaseReTyped("foo"), qt.Equals, "/foo/a")
c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "a") c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "a")
c.Assert(p.Container(), qt.Equals, "a") c.Assert(p.Container(), qt.Equals, "a")
c.Assert(p.Container(), qt.Equals, "a") c.Assert(p.Container(), qt.Equals, "a")
c.Assert(p.ContainerDir(), qt.Equals, "") c.Assert(p.ContainerDir(), qt.Equals, "")
c.Assert(p.Dir(), qt.Equals, "/a") c.Assert(p.Dir(), qt.Equals, "/a")
c.Assert(p.Ext(), qt.Equals, "md") c.Assert(p.Ext(), qt.Equals, "md")
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{"index"})
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"}) c.Assert(p.Identifiers(), qt.DeepEquals, []string{"md"})
c.Assert(p.IsBranchBundle(), qt.IsFalse) c.Assert(p.IsBranchBundle(), qt.IsFalse)
c.Assert(p.IsBundle(), qt.IsTrue) c.Assert(p.IsBundle(), qt.IsTrue)
@ -227,7 +208,6 @@ func TestParse(t *testing.T) {
func(c *qt.C, p *Path) { func(c *qt.C, p *Path) {
c.Assert(p.Base(), qt.Equals, "/a/b") c.Assert(p.Base(), qt.Equals, "/a/b")
c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b") c.Assert(p.BaseNameNoIdentifier(), qt.Equals, "b")
c.Assert(p.BaseReTyped("foo"), qt.Equals, "/foo/b")
c.Assert(p.Container(), qt.Equals, "b") c.Assert(p.Container(), qt.Equals, "b")
c.Assert(p.ContainerDir(), qt.Equals, "/a") c.Assert(p.ContainerDir(), qt.Equals, "/a")
c.Assert(p.Dir(), qt.Equals, "/a/b") c.Assert(p.Dir(), qt.Equals, "/a/b")
@ -240,7 +220,6 @@ func TestParse(t *testing.T) {
c.Assert(p.NameNoExt(), qt.Equals, "index.no") c.Assert(p.NameNoExt(), qt.Equals, "index.no")
c.Assert(p.NameNoIdentifier(), qt.Equals, "index") c.Assert(p.NameNoIdentifier(), qt.Equals, "index")
c.Assert(p.NameNoLang(), qt.Equals, "index.md") c.Assert(p.NameNoLang(), qt.Equals, "index.md")
c.Assert(p.Path(), qt.Equals, "/a/b/index.no.md")
c.Assert(p.PathNoLang(), qt.Equals, "/a/b/index.md") c.Assert(p.PathNoLang(), qt.Equals, "/a/b/index.md")
c.Assert(p.Section(), qt.Equals, "a") c.Assert(p.Section(), qt.Equals, "a")
}, },
@ -376,225 +355,11 @@ func TestParse(t *testing.T) {
} }
for _, test := range tests { for _, test := range tests {
c.Run(test.name, func(c *qt.C) { c.Run(test.name, func(c *qt.C) {
if test.name != "Home branch cundle" {
// return
}
test.assert(c, testParser.Parse(files.ComponentFolderContent, test.path)) test.assert(c, testParser.Parse(files.ComponentFolderContent, test.path))
}) })
} }
} }
func TestParseLayouts(t *testing.T) {
c := qt.New(t)
tests := []struct {
name string
path string
assert func(c *qt.C, p *Path)
}{
{
"Basic",
"/list.html",
func(c *qt.C, p *Path) {
c.Assert(p.Base(), qt.Equals, "/list.html")
c.Assert(p.OutputFormat(), qt.Equals, "html")
},
},
{
"Lang",
"/list.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "no", "list"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{})
c.Assert(p.Base(), qt.Equals, "/list.html")
c.Assert(p.Lang(), qt.Equals, "no")
},
},
{
"Kind",
"/section.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Kind(), qt.Equals, kinds.KindSection)
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "no", "section"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{})
c.Assert(p.Base(), qt.Equals, "/section.html")
c.Assert(p.Lang(), qt.Equals, "no")
},
},
{
"Layout",
"/list.section.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Layout(), qt.Equals, "list")
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "no", "section", "list"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{})
c.Assert(p.Base(), qt.Equals, "/list.html")
c.Assert(p.Lang(), qt.Equals, "no")
},
},
{
"Layout multiple",
"/mylayout.list.section.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Layout(), qt.Equals, "mylayout")
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "no", "section", "list", "mylayout"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{})
c.Assert(p.Base(), qt.Equals, "/mylayout.html")
c.Assert(p.Lang(), qt.Equals, "no")
},
},
{
"Layout shortcode",
"/_shortcodes/myshort.list.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Layout(), qt.Equals, "list")
},
},
{
"Layout baseof",
"/baseof.list.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Layout(), qt.Equals, "list")
},
},
{
"Lang and output format",
"/list.no.amp.not.html",
func(c *qt.C, p *Path) {
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "not", "amp", "no", "list"})
c.Assert(p.OutputFormat(), qt.Equals, "amp")
c.Assert(p.Ext(), qt.Equals, "html")
c.Assert(p.Lang(), qt.Equals, "no")
c.Assert(p.Base(), qt.Equals, "/list.html")
},
},
{
"Term",
"/term.html",
func(c *qt.C, p *Path) {
c.Assert(p.Base(), qt.Equals, "/term.html")
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "term"})
c.Assert(p.PathNoIdentifier(), qt.Equals, "/term")
c.Assert(p.PathBeforeLangAndOutputFormatAndExt(), qt.Equals, "/term")
c.Assert(p.Lang(), qt.Equals, "")
c.Assert(p.Kind(), qt.Equals, "term")
c.Assert(p.OutputFormat(), qt.Equals, "html")
},
},
{
"Shortcode with layout",
"/_shortcodes/myshortcode.list.html",
func(c *qt.C, p *Path) {
c.Assert(p.Base(), qt.Equals, "/_shortcodes/myshortcode.html")
c.Assert(p.Type(), qt.Equals, TypeShortcode)
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "list"})
c.Assert(p.Layout(), qt.Equals, "list")
c.Assert(p.PathNoIdentifier(), qt.Equals, "/_shortcodes/myshortcode")
c.Assert(p.PathBeforeLangAndOutputFormatAndExt(), qt.Equals, "/_shortcodes/myshortcode.list")
c.Assert(p.Lang(), qt.Equals, "")
c.Assert(p.Kind(), qt.Equals, "")
c.Assert(p.OutputFormat(), qt.Equals, "html")
},
},
{
"Sub dir",
"/pages/home.html",
func(c *qt.C, p *Path) {
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "home"})
c.Assert(p.Lang(), qt.Equals, "")
c.Assert(p.Kind(), qt.Equals, "home")
c.Assert(p.OutputFormat(), qt.Equals, "html")
c.Assert(p.Dir(), qt.Equals, "/pages")
},
},
{
"Baseof",
"/pages/baseof.list.section.fr.amp.html",
func(c *qt.C, p *Path) {
c.Assert(p.Identifiers(), qt.DeepEquals, []string{"html", "amp", "fr", "section", "list", "baseof"})
c.Assert(p.IdentifiersUnknown(), qt.DeepEquals, []string{})
c.Assert(p.Kind(), qt.Equals, kinds.KindSection)
c.Assert(p.Lang(), qt.Equals, "fr")
c.Assert(p.OutputFormat(), qt.Equals, "amp")
c.Assert(p.Dir(), qt.Equals, "/pages")
c.Assert(p.NameNoIdentifier(), qt.Equals, "baseof")
c.Assert(p.Type(), qt.Equals, TypeBaseof)
c.Assert(p.IdentifierBase(), qt.Equals, "/pages/baseof.list.section.fr.amp.html")
},
},
{
"Markup",
"/_markup/render-link.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeMarkup)
},
},
{
"Markup nested",
"/foo/_markup/render-link.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeMarkup)
},
},
{
"Shortcode",
"/_shortcodes/myshortcode.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeShortcode)
},
},
{
"Shortcode nested",
"/foo/_shortcodes/myshortcode.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeShortcode)
},
},
{
"Shortcode nested sub",
"/foo/_shortcodes/foo/myshortcode.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeShortcode)
},
},
{
"Partials",
"/_partials/foo.bar",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypePartial)
},
},
{
"Shortcode lang in root",
"/_shortcodes/no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeShortcode)
c.Assert(p.Lang(), qt.Equals, "")
c.Assert(p.NameNoIdentifier(), qt.Equals, "no")
},
},
{
"Shortcode lang layout",
"/_shortcodes/myshortcode.no.html",
func(c *qt.C, p *Path) {
c.Assert(p.Type(), qt.Equals, TypeShortcode)
c.Assert(p.Lang(), qt.Equals, "no")
c.Assert(p.Layout(), qt.Equals, "")
c.Assert(p.NameNoIdentifier(), qt.Equals, "myshortcode")
},
},
}
for _, test := range tests {
c.Run(test.name, func(c *qt.C) {
if test.name != "Shortcode lang layout" {
// return
}
test.assert(c, testParser.Parse(files.ComponentFolderLayouts, test.path))
})
}
}
func TestHasExt(t *testing.T) { func TestHasExt(t *testing.T) {
c := qt.New(t) c := qt.New(t)

View file

@ -78,26 +78,3 @@ disablePathToLower = true
b.AssertFileContent("public/en/mysection/mybundle/index.html", "en|Single") b.AssertFileContent("public/en/mysection/mybundle/index.html", "en|Single")
b.AssertFileContent("public/fr/MySection/MyBundle/index.html", "fr|Single") b.AssertFileContent("public/fr/MySection/MyBundle/index.html", "fr|Single")
} }
func TestIssue13596(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['home','rss','section','sitemap','taxonomy','term']
-- content/p1/index.md --
---
title: p1
---
-- content/p1/a.1.txt --
-- content/p1/a.2.txt --
-- layouts/all.html --
{{ range .Resources.Match "*" }}{{ .Name }}|{{ end }}
`
b := hugolib.Test(t, files)
b.AssertFileContent("public/p1/index.html", "a.1.txt|a.2.txt|")
b.AssertFileExists("public/p1/a.1.txt", true)
b.AssertFileExists("public/p1/a.2.txt", true) // fails
}

View file

@ -0,0 +1,27 @@
// Code generated by "stringer -type=PathType"; DO NOT EDIT.
package paths
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[PathTypeFile-0]
_ = x[PathTypeContentResource-1]
_ = x[PathTypeContentSingle-2]
_ = x[PathTypeLeaf-3]
_ = x[PathTypeBranch-4]
}
const _PathType_name = "PathTypeFilePathTypeContentResourcePathTypeContentSinglePathTypeLeafPathTypeBranch"
var _PathType_index = [...]uint8{0, 12, 35, 56, 68, 82}
func (i PathType) String() string {
if i < 0 || i >= PathType(len(_PathType_index)-1) {
return "PathType(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _PathType_name[_PathType_index[i]:_PathType_index[i+1]]
}

View file

@ -1,32 +0,0 @@
// Code generated by "stringer -type Type"; DO NOT EDIT.
package paths
import "strconv"
func _() {
// An "invalid array index" compiler error signifies that the constant values have changed.
// Re-run the stringer command to generate them again.
var x [1]struct{}
_ = x[TypeFile-0]
_ = x[TypeContentResource-1]
_ = x[TypeContentSingle-2]
_ = x[TypeLeaf-3]
_ = x[TypeBranch-4]
_ = x[TypeContentData-5]
_ = x[TypeMarkup-6]
_ = x[TypeShortcode-7]
_ = x[TypePartial-8]
_ = x[TypeBaseof-9]
}
const _Type_name = "TypeFileTypeContentResourceTypeContentSingleTypeLeafTypeBranchTypeContentDataTypeMarkupTypeShortcodeTypePartialTypeBaseof"
var _Type_index = [...]uint8{0, 8, 27, 44, 52, 62, 77, 87, 100, 111, 121}
func (i Type) String() string {
if i < 0 || i >= Type(len(_Type_index)-1) {
return "Type(" + strconv.FormatInt(int64(i), 10) + ")"
}
return _Type_name[_Type_index[i]:_Type_index[i+1]]
}

View file

@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved. // Copyright 2021 The Hugo Authors. All rights reserved.
// //
// Licensed under the Apache License, Version 2.0 (the "License"); // Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License. // you may not use this file except in compliance with the License.
@ -18,7 +18,6 @@ import (
"net/url" "net/url"
"path" "path"
"path/filepath" "path/filepath"
"runtime"
"strings" "strings"
) )
@ -160,77 +159,9 @@ func Uglify(in string) string {
return path.Clean(in) return path.Clean(in)
} }
// URLEscape escapes unicode letters. // UrlToFilename converts the URL s to a filename.
func URLEscape(uri string) string {
// escape unicode letters
u, err := url.Parse(uri)
if err != nil {
panic(err)
}
return u.String()
}
// TrimExt trims the extension from a path.
func TrimExt(in string) string {
return strings.TrimSuffix(in, path.Ext(in))
}
// From https://github.com/golang/go/blob/e0c76d95abfc1621259864adb3d101cf6f1f90fc/src/cmd/go/internal/web/url.go#L45
func UrlFromFilename(filename string) (*url.URL, error) {
if !filepath.IsAbs(filename) {
return nil, fmt.Errorf("filepath must be absolute")
}
// If filename has a Windows volume name, convert the volume to a host and prefix
// per https://blogs.msdn.microsoft.com/ie/2006/12/06/file-uris-in-windows/.
if vol := filepath.VolumeName(filename); vol != "" {
if strings.HasPrefix(vol, `\\`) {
filename = filepath.ToSlash(filename[2:])
i := strings.IndexByte(filename, '/')
if i < 0 {
// A degenerate case.
// \\host.example.com (without a share name)
// becomes
// file://host.example.com/
return &url.URL{
Scheme: "file",
Host: filename,
Path: "/",
}, nil
}
// \\host.example.com\Share\path\to\file
// becomes
// file://host.example.com/Share/path/to/file
return &url.URL{
Scheme: "file",
Host: filename[:i],
Path: filepath.ToSlash(filename[i:]),
}, nil
}
// C:\path\to\file
// becomes
// file:///C:/path/to/file
return &url.URL{
Scheme: "file",
Path: "/" + filepath.ToSlash(filename),
}, nil
}
// /path/to/file
// becomes
// file:///path/to/file
return &url.URL{
Scheme: "file",
Path: filepath.ToSlash(filename),
}, nil
}
// UrlStringToFilename converts the URL s to a filename.
// If ParseRequestURI fails, the input is just converted to OS specific slashes and returned. // If ParseRequestURI fails, the input is just converted to OS specific slashes and returned.
func UrlStringToFilename(s string) (string, bool) { func UrlToFilename(s string) (string, bool) {
u, err := url.ParseRequestURI(s) u, err := url.ParseRequestURI(s)
if err != nil { if err != nil {
return filepath.FromSlash(s), false return filepath.FromSlash(s), false
@ -240,34 +171,25 @@ func UrlStringToFilename(s string) (string, bool) {
if p == "" { if p == "" {
p, _ = url.QueryUnescape(u.Opaque) p, _ = url.QueryUnescape(u.Opaque)
return filepath.FromSlash(p), false return filepath.FromSlash(p), true
}
if runtime.GOOS != "windows" {
return p, true
}
if len(p) == 0 || p[0] != '/' {
return filepath.FromSlash(p), false
} }
p = filepath.FromSlash(p) p = filepath.FromSlash(p)
if len(u.Host) == 1 { if u.Host != "" {
// file://c/Users/... // C:\data\file.txt
return strings.ToUpper(u.Host) + ":" + p, true p = strings.ToUpper(u.Host) + ":" + p
} }
if u.Host != "" && u.Host != "localhost" { return p, true
if filepath.VolumeName(u.Host) != "" { }
return "", false
} // URLEscape escapes unicode letters.
return `\\` + u.Host + p, true func URLEscape(uri string) string {
} // escape unicode letters
u, err := url.Parse(uri)
if vol := filepath.VolumeName(p[1:]); vol == "" || strings.HasPrefix(vol, `\\`) { if err != nil {
return "", false panic(err)
} }
return u.String()
return p[1:], true
} }
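
A small sketch of the two master-side helpers above as used on a POSIX host; the Windows volume and UNC branches make up most of the code but need Windows-style input to exercise:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/paths"
)

func main() {
	// Absolute filename -> file:// URL.
	u, err := paths.UrlFromFilename("/path/to/file")
	if err != nil {
		panic(err)
	}
	fmt.Println(u.String()) // file:///path/to/file

	// And back; the bool reports whether the input parsed as a URL at all.
	name, ok := paths.UrlStringToFilename("file:///path/to/file")
	fmt.Println(name, ok) // /path/to/file true
}
```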

View file

@ -51,7 +51,7 @@ func Run[T any](ctx context.Context, cfg Config[T]) Group[T] {
// Buffered for performance. // Buffered for performance.
ch := make(chan T, cfg.NumWorkers) ch := make(chan T, cfg.NumWorkers)
for range cfg.NumWorkers { for i := 0; i < cfg.NumWorkers; i++ {
g.Go(func() error { g.Go(func() error {
for { for {
select { select {
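
The master side of this hunk uses Go 1.22's range over an integer instead of a counter loop, a pattern that recurs in several files below; a minimal sketch:

```go
package main

import "fmt"

func main() {
	// Go 1.22+: ranging over an int yields 0..n-1.
	numWorkers := 4
	for i := range numWorkers {
		fmt.Println("worker", i) // worker 0 .. worker 3
	}

	// The index can be dropped when only the count matters, as in Run above.
	for range numWorkers {
		// start a worker goroutine, etc.
	}
}
```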

View file

@ -103,7 +103,10 @@ func (r *RunEvery) Add(name string, f Func) {
f.IntervalHigh = 20 * time.Second f.IntervalHigh = 20 * time.Second
} }
start := max(f.IntervalHigh/3, f.IntervalLow) start := f.IntervalHigh / 3
if start < f.IntervalLow {
start = f.IntervalLow
}
f.interval = start f.interval = start
f.last = time.Now() f.last = time.Now()
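
Similarly, the master side of this hunk leans on the max builtin from Go 1.21; sketch:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	intervalLow := 5 * time.Second
	intervalHigh := 20 * time.Second

	// Go 1.21+ builtin max replaces the explicit "raise to the floor" branch.
	start := max(intervalHigh/3, intervalLow)
	fmt.Println(start) // 6.666666666s
}
```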

View file

@ -17,6 +17,7 @@ package terminal
import ( import (
"fmt" "fmt"
"os" "os"
"runtime"
"strings" "strings"
isatty "github.com/mattn/go-isatty" isatty "github.com/mattn/go-isatty"
@ -40,6 +41,10 @@ func PrintANSIColors(f *os.File) bool {
// IsTerminal return true if the file descriptor is terminal and the TERM // IsTerminal return true if the file descriptor is terminal and the TERM
// environment variable isn't a dumb one. // environment variable isn't a dumb one.
func IsTerminal(f *os.File) bool { func IsTerminal(f *os.File) bool {
if runtime.GOOS == "windows" {
return false
}
fd := f.Fd() fd := f.Fd()
return os.Getenv("TERM") != "dumb" && (isatty.IsTerminal(fd) || isatty.IsCygwinTerminal(fd)) return os.Getenv("TERM") != "dumb" && (isatty.IsTerminal(fd) || isatty.IsCygwinTerminal(fd))
} }

View file

@ -19,13 +19,6 @@ type Closer interface {
Close() error Close() error
} }
// CloserFunc is a convenience type to create a Closer from a function.
type CloserFunc func() error
func (f CloserFunc) Close() error {
return f()
}
type CloseAdder interface { type CloseAdder interface {
Add(Closer) Add(Closer)
} }
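
CloserFunc (master side) is a function adapter in the spirit of http.HandlerFunc; a small sketch, assuming the common/types package shown here:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/types"
)

func main() {
	// Wrap a plain func() error so it satisfies the Closer interface.
	var c types.Closer = types.CloserFunc(func() error {
		fmt.Println("releasing resources")
		return nil
	})
	_ = c.Close() // prints "releasing resources"
}
```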

View file

@ -69,7 +69,7 @@ func ToStringSlicePreserveStringE(v any) ([]string, error) {
switch vv.Kind() { switch vv.Kind() {
case reflect.Slice, reflect.Array: case reflect.Slice, reflect.Array:
result = make([]string, vv.Len()) result = make([]string, vv.Len())
for i := range vv.Len() { for i := 0; i < vv.Len(); i++ {
s, err := cast.ToStringE(vv.Index(i).Interface()) s, err := cast.ToStringE(vv.Index(i).Interface())
if err != nil { if err != nil {
return nil, err return nil, err

View file

@ -15,28 +15,27 @@
package types package types
import ( import (
"slices"
"sync" "sync"
) )
// EvictingQueue is a queue which automatically evicts elements from the head of // EvictingStringQueue is a queue which automatically evicts elements from the head of
// the queue when attempting to add new elements onto the queue and it is full. // the queue when attempting to add new elements onto the queue and it is full.
// This queue orders elements LIFO (last-in-first-out). It throws away duplicates. // This queue orders elements LIFO (last-in-first-out). It throws away duplicates.
type EvictingQueue[T comparable] struct { // Note: This queue currently does not contain any remove (poll etc.) methods.
type EvictingStringQueue struct {
size int size int
vals []T vals []string
set map[T]bool set map[string]bool
mu sync.Mutex mu sync.Mutex
zero T
} }
// NewEvictingQueue creates a new queue with the given size. // NewEvictingStringQueue creates a new queue with the given size.
func NewEvictingQueue[T comparable](size int) *EvictingQueue[T] { func NewEvictingStringQueue(size int) *EvictingStringQueue {
return &EvictingQueue[T]{size: size, set: make(map[T]bool)} return &EvictingStringQueue{size: size, set: make(map[string]bool)}
} }
// Add adds a new string to the tail of the queue if it's not already there. // Add adds a new string to the tail of the queue if it's not already there.
func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] { func (q *EvictingStringQueue) Add(v string) *EvictingStringQueue {
q.mu.Lock() q.mu.Lock()
if q.set[v] { if q.set[v] {
q.mu.Unlock() q.mu.Unlock()
@ -46,7 +45,7 @@ func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] {
if len(q.set) == q.size { if len(q.set) == q.size {
// Full // Full
delete(q.set, q.vals[0]) delete(q.set, q.vals[0])
q.vals = slices.Delete(q.vals, 0, 1) q.vals = append(q.vals[:0], q.vals[1:]...)
} }
q.set[v] = true q.set[v] = true
q.vals = append(q.vals, v) q.vals = append(q.vals, v)
@ -55,7 +54,7 @@ func (q *EvictingQueue[T]) Add(v T) *EvictingQueue[T] {
return q return q
} }
func (q *EvictingQueue[T]) Len() int { func (q *EvictingStringQueue) Len() int {
if q == nil { if q == nil {
return 0 return 0
} }
@ -65,22 +64,19 @@ func (q *EvictingQueue[T]) Len() int {
} }
// Contains returns whether the queue contains v. // Contains returns whether the queue contains v.
func (q *EvictingQueue[T]) Contains(v T) bool { func (q *EvictingStringQueue) Contains(v string) bool {
if q == nil {
return false
}
q.mu.Lock() q.mu.Lock()
defer q.mu.Unlock() defer q.mu.Unlock()
return q.set[v] return q.set[v]
} }
// Peek looks at the last element added to the queue. // Peek looks at the last element added to the queue.
func (q *EvictingQueue[T]) Peek() T { func (q *EvictingStringQueue) Peek() string {
q.mu.Lock() q.mu.Lock()
l := len(q.vals) l := len(q.vals)
if l == 0 { if l == 0 {
q.mu.Unlock() q.mu.Unlock()
return q.zero return ""
} }
elem := q.vals[l-1] elem := q.vals[l-1]
q.mu.Unlock() q.mu.Unlock()
@ -88,12 +84,9 @@ func (q *EvictingQueue[T]) Peek() T {
} }
// PeekAll looks at all the elements in the queue, with the newest first. // PeekAll looks at all the elements in the queue, with the newest first.
func (q *EvictingQueue[T]) PeekAll() []T { func (q *EvictingStringQueue) PeekAll() []string {
if q == nil {
return nil
}
q.mu.Lock() q.mu.Lock()
vals := make([]T, len(q.vals)) vals := make([]string, len(q.vals))
copy(vals, q.vals) copy(vals, q.vals)
q.mu.Unlock() q.mu.Unlock()
for i, j := 0, len(vals)-1; i < j; i, j = i+1, j-1 { for i, j := 0, len(vals)-1; i < j; i, j = i+1, j-1 {
@ -103,9 +96,9 @@ func (q *EvictingQueue[T]) PeekAll() []T {
} }
// PeekAllSet returns PeekAll as a set. // PeekAllSet returns PeekAll as a set.
func (q *EvictingQueue[T]) PeekAllSet() map[T]bool { func (q *EvictingStringQueue) PeekAllSet() map[string]bool {
all := q.PeekAll() all := q.PeekAll()
set := make(map[T]bool) set := make(map[string]bool)
for _, v := range all { for _, v := range all {
set[v] = true set[v] = true
} }
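
A usage sketch of the generic EvictingQueue on the master side, with hypothetical values:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/types"
)

func main() {
	// Size-3 queue of strings; duplicates are ignored and the oldest
	// entry is evicted when the queue is full.
	q := types.NewEvictingQueue[string](3)
	q.Add("a").Add("b").Add("c").Add("d") // "a" is evicted

	fmt.Println(q.Peek())        // d
	fmt.Println(q.PeekAll())     // [d c b]
	fmt.Println(q.Contains("a")) // false
}
```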

View file

@ -23,7 +23,7 @@ import (
func TestEvictingStringQueue(t *testing.T) { func TestEvictingStringQueue(t *testing.T) {
c := qt.New(t) c := qt.New(t)
queue := NewEvictingQueue[string](3) queue := NewEvictingStringQueue(3)
c.Assert(queue.Peek(), qt.Equals, "") c.Assert(queue.Peek(), qt.Equals, "")
queue.Add("a") queue.Add("a")
@ -53,9 +53,9 @@ func TestEvictingStringQueueConcurrent(t *testing.T) {
var wg sync.WaitGroup var wg sync.WaitGroup
val := "someval" val := "someval"
queue := NewEvictingQueue[string](3) queue := NewEvictingStringQueue(3)
for range 100 { for j := 0; j < 100; j++ {
wg.Add(1) wg.Add(1)
go func() { go func() {
defer wg.Done() defer wg.Done()

View file

@ -28,16 +28,6 @@ type RLocker interface {
RUnlock() RUnlock()
} }
type Locker interface {
Lock()
Unlock()
}
type RWLocker interface {
RLocker
Locker
}
// KeyValue is a interface{} tuple. // KeyValue is a interface{} tuple.
type KeyValue struct { type KeyValue struct {
Key any Key any
@ -69,7 +59,7 @@ func (k KeyValues) String() string {
// KeyValues struct. // KeyValues struct.
func NewKeyValuesStrings(key string, values ...string) KeyValues { func NewKeyValuesStrings(key string, values ...string) KeyValues {
iv := make([]any, len(values)) iv := make([]any, len(values))
for i := range values { for i := 0; i < len(values); i++ {
iv[i] = values[i] iv[i] = values[i]
} }
return KeyValues{Key: key, Values: iv} return KeyValues{Key: key, Values: iv}
@ -143,3 +133,22 @@ func NewBool(b bool) *bool {
type PrintableValueProvider interface { type PrintableValueProvider interface {
PrintableValue() any PrintableValue() any
} }
var _ PrintableValueProvider = Result[any]{}
// Result is a generic result type.
type Result[T any] struct {
// The result value.
Value T
// The error value.
Err error
}
// PrintableValue returns the value or panics if there is an error.
func (r Result[T]) PrintableValue() any {
if r.Err != nil {
panic(r.Err)
}
return r.Value
}
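
For reference, a tiny sketch against the Result definition above (the v0.137.1 side); PrintableValue simply unwraps the value and panics if Err is set:

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/common/types"
)

func main() {
	// A successful result; PrintableValue just unwraps it.
	r := types.Result[int]{Value: 42}
	fmt.Println(r.PrintableValue()) // 42

	// With Err set, PrintableValue panics, so callers are expected to
	// check Err before handing the value to a template.
	_ = types.Result[int]{Err: fmt.Errorf("boom")}
}
```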

View file

@ -82,7 +82,7 @@ func init() {
} }
configLanguageKeys = make(map[string]bool) configLanguageKeys = make(map[string]bool)
addKeys := func(v reflect.Value) { addKeys := func(v reflect.Value) {
for i := range v.NumField() { for i := 0; i < v.NumField(); i++ {
name := strings.ToLower(v.Type().Field(i).Name) name := strings.ToLower(v.Type().Field(i).Name)
if skip[name] { if skip[name] {
continue continue
@ -128,9 +128,6 @@ type Config struct {
// <docsmeta>{"identifiers": ["markup"] }</docsmeta> // <docsmeta>{"identifiers": ["markup"] }</docsmeta>
Markup markup_config.Config `mapstructure:"-"` Markup markup_config.Config `mapstructure:"-"`
// ContentTypes are the media types that's considered content in Hugo.
ContentTypes *config.ConfigNamespace[map[string]media.ContentTypeConfig, media.ContentTypes] `mapstructure:"-"`
// The mediatypes configuration section maps the MIME type (a string) to a configuration object for that type. // The mediatypes configuration section maps the MIME type (a string) to a configuration object for that type.
// <docsmeta>{"identifiers": ["mediatypes"], "refs": ["types:media:type"] }</docsmeta> // <docsmeta>{"identifiers": ["mediatypes"], "refs": ["types:media:type"] }</docsmeta>
MediaTypes *config.ConfigNamespace[map[string]media.MediaTypeConfig, media.Types] `mapstructure:"-"` MediaTypes *config.ConfigNamespace[map[string]media.MediaTypeConfig, media.Types] `mapstructure:"-"`
@ -146,7 +143,7 @@ type Config struct {
// The cascade configuration section contains the top level front matter cascade configuration options, // The cascade configuration section contains the top level front matter cascade configuration options,
// a slice of page matcher and params to apply to those pages. // a slice of page matcher and params to apply to those pages.
Cascade *config.ConfigNamespace[[]page.PageMatcherParamsConfig, *maps.Ordered[page.PageMatcher, page.PageMatcherParamsConfig]] `mapstructure:"-"` Cascade *config.ConfigNamespace[[]page.PageMatcherParamsConfig, map[page.PageMatcher]maps.Params] `mapstructure:"-"`
// The segments defines segments for the site. Used for partial/segmented builds. // The segments defines segments for the site. Used for partial/segmented builds.
Segments *config.ConfigNamespace[map[string]segments.SegmentConfig, segments.Segments] `mapstructure:"-"` Segments *config.ConfigNamespace[map[string]segments.SegmentConfig, segments.Segments] `mapstructure:"-"`
@ -304,18 +301,6 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
} }
} }
defaultOutputFormat := outputFormats[0]
c.DefaultOutputFormat = strings.ToLower(c.DefaultOutputFormat)
if c.DefaultOutputFormat != "" {
f, found := outputFormats.GetByName(c.DefaultOutputFormat)
if !found {
return fmt.Errorf("unknown default output format %q", c.DefaultOutputFormat)
}
defaultOutputFormat = f
} else {
c.DefaultOutputFormat = defaultOutputFormat.Name
}
disabledLangs := make(map[string]bool) disabledLangs := make(map[string]bool)
for _, lang := range c.DisableLanguages { for _, lang := range c.DisableLanguages {
disabledLangs[lang] = true disabledLangs[lang] = true
@ -396,46 +381,15 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
// Legacy paginate values. // Legacy paginate values.
if c.Paginate != 0 { if c.Paginate != 0 {
hugo.DeprecateWithLogger("site config key paginate", "Use pagination.pagerSize instead.", "v0.128.0", logger.Logger()) hugo.Deprecate("site config key paginate", "Use pagination.pagerSize instead.", "v0.128.0")
c.Pagination.PagerSize = c.Paginate c.Pagination.PagerSize = c.Paginate
} }
if c.PaginatePath != "" { if c.PaginatePath != "" {
hugo.DeprecateWithLogger("site config key paginatePath", "Use pagination.path instead.", "v0.128.0", logger.Logger()) hugo.Deprecate("site config key paginatePath", "Use pagination.path instead.", "v0.128.0")
c.Pagination.Path = c.PaginatePath c.Pagination.Path = c.PaginatePath
} }
// Legacy privacy values.
if c.Privacy.Twitter.Disable {
hugo.DeprecateWithLogger("site config key privacy.twitter.disable", "Use privacy.x.disable instead.", "v0.141.0", logger.Logger())
c.Privacy.X.Disable = c.Privacy.Twitter.Disable
}
if c.Privacy.Twitter.EnableDNT {
hugo.DeprecateWithLogger("site config key privacy.twitter.enableDNT", "Use privacy.x.enableDNT instead.", "v0.141.0", logger.Logger())
c.Privacy.X.EnableDNT = c.Privacy.Twitter.EnableDNT
}
if c.Privacy.Twitter.Simple {
hugo.DeprecateWithLogger("site config key privacy.twitter.simple", "Use privacy.x.simple instead.", "v0.141.0", logger.Logger())
c.Privacy.X.Simple = c.Privacy.Twitter.Simple
}
// Legacy services values.
if c.Services.Twitter.DisableInlineCSS {
hugo.DeprecateWithLogger("site config key services.twitter.disableInlineCSS", "Use services.x.disableInlineCSS instead.", "v0.141.0", logger.Logger())
c.Services.X.DisableInlineCSS = c.Services.Twitter.DisableInlineCSS
}
// Legacy permalink tokens
vs := fmt.Sprintf("%v", c.Permalinks)
if strings.Contains(vs, ":filename") {
hugo.DeprecateWithLogger("the \":filename\" permalink token", "Use \":contentbasename\" instead.", "0.144.0", logger.Logger())
}
if strings.Contains(vs, ":slugorfilename") {
hugo.DeprecateWithLogger("the \":slugorfilename\" permalink token", "Use \":slugorcontentbasename\" instead.", "0.144.0", logger.Logger())
}
c.C = &ConfigCompiled{ c.C = &ConfigCompiled{
Timeout: timeout, Timeout: timeout,
BaseURL: baseURL, BaseURL: baseURL,
@ -444,7 +398,7 @@ func (c *Config) CompileConfig(logger loggers.Logger) error {
DisabledLanguages: disabledLangs, DisabledLanguages: disabledLangs,
IgnoredLogs: ignoredLogIDs, IgnoredLogs: ignoredLogIDs,
KindOutputFormats: kindOutputFormats, KindOutputFormats: kindOutputFormats,
DefaultOutputFormat: defaultOutputFormat, ContentTypes: media.DefaultContentTypes.FromTypes(c.MediaTypes.Config),
CreateTitle: helpers.GetTitleFunc(c.TitleCaseStyle), CreateTitle: helpers.GetTitleFunc(c.TitleCaseStyle),
IsUglyURLSection: isUglyURL, IsUglyURLSection: isUglyURL,
IgnoreFile: ignoreFile, IgnoreFile: ignoreFile,
@ -481,7 +435,7 @@ type ConfigCompiled struct {
BaseURLLiveReload urls.BaseURL BaseURLLiveReload urls.BaseURL
ServerInterface string ServerInterface string
KindOutputFormats map[string]output.Formats KindOutputFormats map[string]output.Formats
DefaultOutputFormat output.Format ContentTypes media.ContentTypes
DisabledKinds map[string]bool DisabledKinds map[string]bool
DisabledLanguages map[string]bool DisabledLanguages map[string]bool
IgnoredLogs map[string]bool IgnoredLogs map[string]bool
@ -551,13 +505,6 @@ type RootConfig struct {
// Set this to true to put all languages below their language ID. // Set this to true to put all languages below their language ID.
DefaultContentLanguageInSubdir bool DefaultContentLanguageInSubdir bool
// The default output format to use for the site.
// If not set, we will use the first output format.
DefaultOutputFormat string
// Disable generation of redirect to the default language when DefaultContentLanguageInSubdir is enabled.
DisableDefaultLanguageRedirect bool
// Disable creation of alias redirect pages. // Disable creation of alias redirect pages.
DisableAliases bool DisableAliases bool
@ -776,16 +723,15 @@ type Configs struct {
} }
func (c *Configs) Validate(logger loggers.Logger) error { func (c *Configs) Validate(logger loggers.Logger) error {
c.Base.Cascade.Config.Range(func(p page.PageMatcher, cfg page.PageMatcherParamsConfig) bool { for p := range c.Base.Cascade.Config {
page.CheckCascadePattern(logger, p) page.CheckCascadePattern(logger, p)
return true }
})
return nil return nil
} }
// transientErr returns the last transient error found during config compilation. // transientErr returns the last transient error found during config compilation.
func (c *Configs) transientErr() error { func (c *Configs) transientErr() error {
for _, l := range c.LanguageConfigMap { for _, l := range c.LanguageConfigSlice {
if l.C.transientErr != nil { if l.C.transientErr != nil {
return l.C.transientErr return l.C.transientErr
} }
@ -800,58 +746,31 @@ func (c *Configs) IsZero() bool {
func (c *Configs) Init() error { func (c *Configs) Init() error {
var languages langs.Languages var languages langs.Languages
defaultContentLanguage := c.Base.DefaultContentLanguage
var langKeys []string for k, v := range c.LanguageConfigMap {
var hasEn bool
const en = "en"
for k := range c.LanguageConfigMap {
langKeys = append(langKeys, k)
if k == en {
hasEn = true
}
}
// Sort the LanguageConfigSlice by language weight (if set) or lang.
sort.Slice(langKeys, func(i, j int) bool {
ki := langKeys[i]
kj := langKeys[j]
lki := c.LanguageConfigMap[ki]
lkj := c.LanguageConfigMap[kj]
li := lki.Languages[ki]
lj := lkj.Languages[kj]
if li.Weight != lj.Weight {
return li.Weight < lj.Weight
}
return ki < kj
})
// See issue #13646.
defaultConfigLanguageFallback := en
if !hasEn {
// Pick the first one.
defaultConfigLanguageFallback = langKeys[0]
}
if c.Base.DefaultContentLanguage == "" {
c.Base.DefaultContentLanguage = defaultConfigLanguageFallback
}
for _, k := range langKeys {
v := c.LanguageConfigMap[k]
if v.DefaultContentLanguage == "" {
v.DefaultContentLanguage = defaultConfigLanguageFallback
}
c.LanguageConfigSlice = append(c.LanguageConfigSlice, v) c.LanguageConfigSlice = append(c.LanguageConfigSlice, v)
languageConf := v.Languages[k] languageConf := v.Languages[k]
language, err := langs.NewLanguage(k, c.Base.DefaultContentLanguage, v.TimeZone, languageConf) language, err := langs.NewLanguage(k, defaultContentLanguage, v.TimeZone, languageConf)
if err != nil { if err != nil {
return err return err
} }
languages = append(languages, language) languages = append(languages, language)
} }
// Sort the sites by language weight (if set) or lang.
sort.Slice(languages, func(i, j int) bool {
li := languages[i]
lj := languages[j]
if li.Weight != lj.Weight {
return li.Weight < lj.Weight
}
return li.Lang < lj.Lang
})
for _, l := range languages {
c.LanguageConfigSlice = append(c.LanguageConfigSlice, c.LanguageConfigMap[l.Lang])
}
// Filter out disabled languages. // Filter out disabled languages.
var n int var n int
for _, l := range languages { for _, l := range languages {
@ -864,12 +783,12 @@ func (c *Configs) Init() error {
var languagesDefaultFirst langs.Languages var languagesDefaultFirst langs.Languages
for _, l := range languages { for _, l := range languages {
if l.Lang == c.Base.DefaultContentLanguage { if l.Lang == defaultContentLanguage {
languagesDefaultFirst = append(languagesDefaultFirst, l) languagesDefaultFirst = append(languagesDefaultFirst, l)
} }
} }
for _, l := range languages { for _, l := range languages {
if l.Lang != c.Base.DefaultContentLanguage { if l.Lang != defaultContentLanguage {
languagesDefaultFirst = append(languagesDefaultFirst, l) languagesDefaultFirst = append(languagesDefaultFirst, l)
} }
} }
@ -877,24 +796,7 @@ func (c *Configs) Init() error {
c.Languages = languages c.Languages = languages
c.LanguagesDefaultFirst = languagesDefaultFirst c.LanguagesDefaultFirst = languagesDefaultFirst
c.ContentPathParser = &paths.PathParser{ c.ContentPathParser = &paths.PathParser{LanguageIndex: languagesDefaultFirst.AsIndexSet(), IsLangDisabled: c.Base.IsLangDisabled, IsContentExt: c.Base.C.ContentTypes.IsContentSuffix}
LanguageIndex: languagesDefaultFirst.AsIndexSet(),
IsLangDisabled: c.Base.IsLangDisabled,
IsContentExt: c.Base.ContentTypes.Config.IsContentSuffix,
IsOutputFormat: func(name, ext string) bool {
if name == "" {
return false
}
if of, ok := c.Base.OutputFormats.Config.GetByName(name); ok {
if ext != "" && !of.MediaType.HasSuffix(ext) {
return false
}
return true
}
return false
},
}
c.configLangs = make([]config.AllProvider, len(c.Languages)) c.configLangs = make([]config.AllProvider, len(c.Languages))
for i, l := range c.LanguagesDefaultFirst { for i, l := range c.LanguagesDefaultFirst {
@ -955,48 +857,17 @@ func (c Configs) GetByLang(lang string) config.AllProvider {
return nil return nil
} }
func newDefaultConfig() *Config {
return &Config{
Taxonomies: map[string]string{"tag": "tags", "category": "categories"},
Sitemap: config.SitemapConfig{Priority: -1, Filename: "sitemap.xml"},
RootConfig: RootConfig{
Environment: hugo.EnvironmentProduction,
TitleCaseStyle: "AP",
PluralizeListTitles: true,
CapitalizeListTitles: true,
StaticDir: []string{"static"},
SummaryLength: 70,
Timeout: "60s",
CommonDirs: config.CommonDirs{
ArcheTypeDir: "archetypes",
ContentDir: "content",
ResourceDir: "resources",
PublishDir: "public",
ThemesDir: "themes",
AssetDir: "assets",
LayoutDir: "layouts",
I18nDir: "i18n",
DataDir: "data",
},
},
}
}
// fromLoadConfigResult creates a new Config from res. // fromLoadConfigResult creates a new Config from res.
func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadConfigResult) (*Configs, error) { func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadConfigResult) (*Configs, error) {
if !res.Cfg.IsSet("languages") { if !res.Cfg.IsSet("languages") {
// We need at least one // We need at least one
lang := res.Cfg.GetString("defaultContentLanguage") lang := res.Cfg.GetString("defaultContentLanguage")
if lang == "" {
lang = "en"
}
res.Cfg.Set("languages", maps.Params{lang: maps.Params{}}) res.Cfg.Set("languages", maps.Params{lang: maps.Params{}})
} }
bcfg := res.BaseConfig bcfg := res.BaseConfig
cfg := res.Cfg cfg := res.Cfg
all := newDefaultConfig() all := &Config{}
err := decodeConfigFromParams(fs, logger, bcfg, cfg, all, nil) err := decodeConfigFromParams(fs, logger, bcfg, cfg, all, nil)
if err != nil { if err != nil {
@ -1006,7 +877,6 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon
langConfigMap := make(map[string]*Config) langConfigMap := make(map[string]*Config)
languagesConfig := cfg.GetStringMap("languages") languagesConfig := cfg.GetStringMap("languages")
var isMultihost bool var isMultihost bool
if err := all.CompileConfig(logger); err != nil { if err := all.CompileConfig(logger); err != nil {
@ -1018,17 +888,30 @@ func fromLoadConfigResult(fs afero.Fs, logger loggers.Logger, res config.LoadCon
var differentRootKeys []string var differentRootKeys []string
switch x := v.(type) { switch x := v.(type) {
case maps.Params: case maps.Params:
_, found := x["params"] var params maps.Params
if !found { pv, found := x["params"]
x["params"] = maps.Params{ if found {
params = pv.(maps.Params)
} else {
params = maps.Params{
maps.MergeStrategyKey: maps.ParamsMergeStrategyDeep, maps.MergeStrategyKey: maps.ParamsMergeStrategyDeep,
} }
x["params"] = params
} }
for kk, vv := range x { for kk, vv := range x {
if kk == "_merge" { if kk == "_merge" {
continue continue
} }
if kk != maps.MergeStrategyKey && !configLanguageKeys[kk] {
// This should have been placed below params.
// We accidentally allowed it in the past, so we need to support it a little longer,
// But log a warning.
if _, found := params[kk]; !found {
hugo.Deprecate(fmt.Sprintf("config: languages.%s.%s: custom params on the language top level", k, kk), fmt.Sprintf("Put the value below [languages.%s.params]. See https://gohugo.io/content-management/multilingual/#changes-in-hugo-01120", k), "v0.112.0")
params[kk] = vv
}
}
if kk == "baseurl" { if kk == "baseurl" {
// baseURL configured on the language level is a multihost setup. // baseURL configured on the language level is a multihost setup.
isMultihost = true isMultihost = true
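
A sketch of the master-side defaultOutputFormat setting, written in the same hugolib integration-test style used elsewhere in this compare; the test name, file contents and assertion are illustrative:

```go
func TestDefaultOutputFormatSketch(t *testing.T) {
	t.Parallel()

	files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
defaultOutputFormat = 'amp'
[outputs]
home = ['html','amp']
-- layouts/index.html --
html
-- layouts/index.amp.html --
amp
`
	b := hugolib.Test(t, files)

	// An unknown name fails config compilation; a known one is kept
	// (lower-cased). If unset, the first configured output format is used.
	b.Assert(b.H.Configs.Base.DefaultOutputFormat, qt.Equals, "amp")
}
```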

View file

@ -5,10 +5,8 @@ import (
"testing" "testing"
qt "github.com/frankban/quicktest" qt "github.com/frankban/quicktest"
"github.com/gohugoio/hugo/common/hugo"
"github.com/gohugoio/hugo/config/allconfig" "github.com/gohugoio/hugo/config/allconfig"
"github.com/gohugoio/hugo/hugolib" "github.com/gohugoio/hugo/hugolib"
"github.com/gohugoio/hugo/media"
) )
func TestDirsMount(t *testing.T) { func TestDirsMount(t *testing.T) {
@ -99,7 +97,7 @@ suffixes = ["html", "xhtml"]
b := hugolib.Test(t, files) b := hugolib.Test(t, files)
conf := b.H.Configs.Base conf := b.H.Configs.Base
contentTypes := conf.ContentTypes.Config contentTypes := conf.C.ContentTypes
b.Assert(contentTypes.HTML.Suffixes(), qt.DeepEquals, []string{"html", "xhtml"}) b.Assert(contentTypes.HTML.Suffixes(), qt.DeepEquals, []string{"html", "xhtml"})
b.Assert(contentTypes.Markdown.Suffixes(), qt.DeepEquals, []string{"md", "mdown", "markdown"}) b.Assert(contentTypes.Markdown.Suffixes(), qt.DeepEquals, []string{"md", "mdown", "markdown"})
@ -177,205 +175,3 @@ func TestMapUglyURLs(t *testing.T) {
b.Assert(c.C.IsUglyURLSection("posts"), qt.IsTrue) b.Assert(c.C.IsUglyURLSection("posts"), qt.IsTrue)
b.Assert(c.C.IsUglyURLSection("blog"), qt.IsFalse) b.Assert(c.C.IsUglyURLSection("blog"), qt.IsFalse)
} }
// Issue 13199
func TestInvalidOutputFormat(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
[outputs]
home = ['html','foo']
-- layouts/index.html --
x
`
b, err := hugolib.TestE(t, files)
b.Assert(err, qt.IsNotNil)
b.Assert(err.Error(), qt.Contains, `failed to create config: unknown output format "foo" for kind "home"`)
}
// Issue 13201
func TestLanguageConfigSlice(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableKinds = ['page','rss','section','sitemap','taxonomy','term']
[languages.en]
title = 'TITLE_EN'
weight = 2
[languages.de]
title = 'TITLE_DE'
weight = 1
[languages.fr]
title = 'TITLE_FR'
weight = 3
`
b := hugolib.Test(t, files)
b.Assert(b.H.Configs.LanguageConfigSlice[0].Title, qt.Equals, `TITLE_DE`)
}
func TestContentTypesDefault(t *testing.T) {
files := `
-- hugo.toml --
baseURL = "https://example.com"
`
b := hugolib.Test(t, files)
ct := b.H.Configs.Base.ContentTypes
c := ct.Config
s := ct.SourceStructure.(map[string]media.ContentTypeConfig)
b.Assert(c.IsContentFile("foo.md"), qt.Equals, true)
b.Assert(len(s), qt.Equals, 6)
}
func TestMergeDeep(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
baseURL = "https://example.com"
theme = ["theme1", "theme2"]
_merge = "deep"
-- themes/theme1/hugo.toml --
[sitemap]
filename = 'mysitemap.xml'
[services]
[services.googleAnalytics]
id = 'foo bar'
[taxonomies]
foo = 'bars'
-- themes/theme2/config/_default/hugo.toml --
[taxonomies]
bar = 'baz'
-- layouts/home.html --
GA ID: {{ site.Config.Services.GoogleAnalytics.ID }}.
`
b := hugolib.Test(t, files)
conf := b.H.Configs
base := conf.Base
b.Assert(base.Environment, qt.Equals, hugo.EnvironmentProduction)
b.Assert(base.BaseURL, qt.Equals, "https://example.com")
b.Assert(base.Sitemap.Filename, qt.Equals, "mysitemap.xml")
b.Assert(base.Taxonomies, qt.DeepEquals, map[string]string{"bar": "baz", "foo": "bars"})
b.AssertFileContent("public/index.html", "GA ID: foo bar.")
}
func TestMergeDeepBuildStats(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
baseURL = "https://example.com"
title = "Theme 1"
_merge = "deep"
[module]
[module.hugoVersion]
[[module.imports]]
path = "theme1"
-- themes/theme1/hugo.toml --
[build]
[build.buildStats]
disableIDs = true
enable = true
-- layouts/home.html --
Home.
`
b := hugolib.Test(t, files, hugolib.TestOptOsFs())
conf := b.H.Configs
base := conf.Base
b.Assert(base.Title, qt.Equals, "Theme 1")
b.Assert(len(base.Module.Imports), qt.Equals, 1)
b.Assert(base.Build.BuildStats.Enable, qt.Equals, true)
b.AssertFileExists("/hugo_stats.json", true)
}
func TestMergeDeepBuildStatsTheme(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
baseURL = "https://example.com"
_merge = "deep"
theme = ["theme1"]
-- themes/theme1/hugo.toml --
title = "Theme 1"
[build]
[build.buildStats]
disableIDs = true
enable = true
-- layouts/home.html --
Home.
`
b := hugolib.Test(t, files, hugolib.TestOptOsFs())
conf := b.H.Configs
base := conf.Base
b.Assert(base.Title, qt.Equals, "Theme 1")
b.Assert(len(base.Module.Imports), qt.Equals, 1)
b.Assert(base.Build.BuildStats.Enable, qt.Equals, true)
b.AssertFileExists("/hugo_stats.json", true)
}
func TestDefaultConfigLanguageBlankWhenNoEnglishExists(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
baseURL = "https://example.com"
[languages]
[languages.nn]
weight = 20
[languages.sv]
weight = 10
[languages.sv.taxonomies]
tag = "taggar"
-- layouts/all.html --
All.
`
b := hugolib.Test(t, files)
b.Assert(b.H.Conf.DefaultContentLanguage(), qt.Equals, "sv")
}
func TestDefaultConfigEnvDisableLanguagesIssue13707(t *testing.T) {
t.Parallel()
files := `
-- hugo.toml --
disableLanguages = []
[languages]
[languages.en]
weight = 1
[languages.nn]
weight = 2
[languages.sv]
weight = 3
`
b := hugolib.Test(t, files, hugolib.TestOptWithConfig(func(conf *hugolib.IntegrationTestConfig) {
conf.Environ = []string{`HUGO_DISABLELANGUAGES=sv nn`}
}))
b.Assert(len(b.H.Sites), qt.Equals, 1)
}

View file

@ -163,15 +163,6 @@ var allDecoderSetups = map[string]decodeWeight{
return err return err
}, },
}, },
"contenttypes": {
key: "contenttypes",
weight: 100, // This needs to be decoded after media types.
decode: func(d decodeWeight, p decodeConfig) error {
var err error
p.c.ContentTypes, err = media.DecodeContentTypes(p.p.GetStringMap(d.key), p.c.MediaTypes.Config)
return err
},
},
"mediatypes": { "mediatypes": {
key: "mediatypes", key: "mediatypes",
decode: func(d decodeWeight, p decodeConfig) error { decode: func(d decodeWeight, p decodeConfig) error {
@ -249,18 +240,14 @@ var allDecoderSetups = map[string]decodeWeight{
key: "sitemap", key: "sitemap",
decode: func(d decodeWeight, p decodeConfig) error { decode: func(d decodeWeight, p decodeConfig) error {
var err error var err error
if p.p.IsSet(d.key) { p.c.Sitemap, err = config.DecodeSitemap(config.SitemapConfig{Priority: -1, Filename: "sitemap.xml"}, p.p.GetStringMap(d.key))
p.c.Sitemap, err = config.DecodeSitemap(p.c.Sitemap, p.p.GetStringMap(d.key))
}
return err return err
}, },
}, },
"taxonomies": { "taxonomies": {
key: "taxonomies", key: "taxonomies",
decode: func(d decodeWeight, p decodeConfig) error { decode: func(d decodeWeight, p decodeConfig) error {
if p.p.IsSet(d.key) {
p.c.Taxonomies = maps.CleanConfigStringMapString(p.p.GetStringMapString(d.key)) p.c.Taxonomies = maps.CleanConfigStringMapString(p.p.GetStringMapString(d.key))
}
return nil return nil
}, },
}, },
@ -310,7 +297,6 @@ var allDecoderSetups = map[string]decodeWeight{
} }
// Validate defaultContentLanguage. // Validate defaultContentLanguage.
if p.c.DefaultContentLanguage != "" {
var found bool var found bool
for lang := range p.c.Languages { for lang := range p.c.Languages {
if lang == p.c.DefaultContentLanguage { if lang == p.c.DefaultContentLanguage {
@ -321,7 +307,6 @@ var allDecoderSetups = map[string]decodeWeight{
if !found { if !found {
return fmt.Errorf("config value %q for defaultContentLanguage does not match any language definition", p.c.DefaultContentLanguage) return fmt.Errorf("config value %q for defaultContentLanguage does not match any language definition", p.c.DefaultContentLanguage)
} }
}
return nil return nil
}, },
@ -330,7 +315,7 @@ var allDecoderSetups = map[string]decodeWeight{
key: "cascade", key: "cascade",
decode: func(d decodeWeight, p decodeConfig) error { decode: func(d decodeWeight, p decodeConfig) error {
var err error var err error
p.c.Cascade, err = page.DecodeCascadeConfig(nil, true, p.p.Get(d.key)) p.c.Cascade, err = page.DecodeCascadeConfig(nil, p.p.Get(d.key))
return err return err
}, },
}, },

View file

@ -137,15 +137,15 @@ func (c ConfigLanguage) Watching() bool {
return c.m.Base.Internal.Watch return c.m.Base.Internal.Watch
} }
func (c ConfigLanguage) NewIdentityManager(name string, opts ...identity.ManagerOption) identity.Manager { func (c ConfigLanguage) NewIdentityManager(name string) identity.Manager {
if !c.Watching() { if !c.Watching() {
return identity.NopManager return identity.NopManager
} }
return identity.NewManager(name, opts...) return identity.NewManager(name)
} }
func (c ConfigLanguage) ContentTypes() config.ContentTypesProvider { func (c ConfigLanguage) ContentTypes() config.ContentTypesProvider {
return c.config.ContentTypes.Config return c.config.C.ContentTypes
} }
// GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use. // GetConfigSection is mostly used in tests. The switch statement isn't complete, but what's in use.

View file

@ -91,7 +91,7 @@ func LoadConfig(d ConfigSourceDescriptor) (*Configs, error) {
return nil, fmt.Errorf("failed to init config: %w", err) return nil, fmt.Errorf("failed to init config: %w", err)
} }
loggers.SetGlobalLogger(d.Logger) loggers.InitGlobalLogger(d.Logger.Level(), configs.Base.PanicOnWarning)
return configs, nil return configs, nil
} }
@ -159,9 +159,63 @@ func (l configLoader) applyConfigAliases() error {
func (l configLoader) applyDefaultConfig() error { func (l configLoader) applyDefaultConfig() error {
defaultSettings := maps.Params{ defaultSettings := maps.Params{
// These dirs are used early/before we build the config struct. "baseURL": "",
"cleanDestinationDir": false,
"watch": false,
"contentDir": "content",
"resourceDir": "resources",
"publishDir": "public",
"publishDirOrig": "public",
"themesDir": "themes", "themesDir": "themes",
"assetDir": "assets",
"layoutDir": "layouts",
"i18nDir": "i18n",
"dataDir": "data",
"archetypeDir": "archetypes",
"configDir": "config", "configDir": "config",
"staticDir": "static",
"buildDrafts": false,
"buildFuture": false,
"buildExpired": false,
"params": maps.Params{},
"environment": hugo.EnvironmentProduction,
"uglyURLs": false,
"verbose": false,
"ignoreCache": false,
"canonifyURLs": false,
"relativeURLs": false,
"removePathAccents": false,
"titleCaseStyle": "AP",
"taxonomies": maps.Params{"tag": "tags", "category": "categories"},
"permalinks": maps.Params{},
"sitemap": maps.Params{"priority": -1, "filename": "sitemap.xml"},
"menus": maps.Params{},
"disableLiveReload": false,
"pluralizeListTitles": true,
"capitalizeListTitles": true,
"forceSyncStatic": false,
"footnoteAnchorPrefix": "",
"footnoteReturnLinkContents": "",
"newContentEditor": "",
"paginate": 0, // Moved into the paginator struct in Hugo v0.128.0.
"paginatePath": "", // Moved into the paginator struct in Hugo v0.128.0.
"summaryLength": 70,
"rssLimit": -1,
"sectionPagesMenu": "",
"disablePathToLower": false,
"hasCJKLanguage": false,
"enableEmoji": false,
"defaultContentLanguage": "en",
"defaultContentLanguageInSubdir": false,
"enableMissingTranslationPlaceholders": false,
"enableGitInfo": false,
"ignoreFiles": make([]string, 0),
"disableAliases": false,
"debug": false,
"disableFastRender": false,
"timeout": "30s",
"timeZone": "",
"enableInlineShortcodes": false,
} }
l.cfg.SetDefaults(defaultSettings) l.cfg.SetDefaults(defaultSettings)
@ -233,18 +287,17 @@ func (l configLoader) applyOsEnvOverrides(environ []string) error {
if existing != nil { if existing != nil {
val, err := metadecoders.Default.UnmarshalStringTo(env.Value, existing) val, err := metadecoders.Default.UnmarshalStringTo(env.Value, existing)
if err == nil { if err != nil {
val = l.envValToVal(env.Key, val) continue
}
if owner != nil { if owner != nil {
owner[nestedKey] = val owner[nestedKey] = val
} else { } else {
l.cfg.Set(env.Key, val) l.cfg.Set(env.Key, val)
} }
continue } else {
} if nestedKey != "" {
}
if owner != nil && nestedKey != "" {
owner[nestedKey] = env.Value owner[nestedKey] = env.Value
} else { } else {
var val any var val any
@ -252,32 +305,22 @@ func (l configLoader) applyOsEnvOverrides(environ []string) error {
_, ok := allDecoderSetups[key] _, ok := allDecoderSetups[key]
if ok { if ok {
// A map. // A map.
if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]any{}); err == nil { if v, err := metadecoders.Default.UnmarshalStringTo(env.Value, map[string]interface{}{}); err == nil {
val = v val = v
} }
} }
if val == nil { if val == nil {
// A string. // A string.
val = l.envStringToVal(key, env.Value) val = l.envStringToVal(key, env.Value)
} }
l.cfg.Set(key, val) l.cfg.Set(key, val)
} }
}
} }
return nil return nil
} }
func (l *configLoader) envValToVal(k string, v any) any {
switch v := v.(type) {
case string:
return l.envStringToVal(k, v)
default:
return v
}
}
func (l *configLoader) envStringToVal(k, v string) any { func (l *configLoader) envStringToVal(k, v string) any {
switch k { switch k {
case "disablekinds", "disablelanguages": case "disablekinds", "disablelanguages":
@ -427,7 +470,7 @@ func (l *configLoader) loadModules(configs *Configs, ignoreModuleDoesNotExist bo
ignoreVendor, _ = hglob.GetGlob(hglob.NormalizePath(s)) ignoreVendor, _ = hglob.GetGlob(hglob.NormalizePath(s))
} }
ex := hexec.New(conf.Security, workingDir, l.Logger) ex := hexec.New(conf.Security, workingDir)
hook := func(m *modules.ModulesConfig) error { hook := func(m *modules.ModulesConfig) error {
for _, tc := range m.AllModules { for _, tc := range m.AllModules {

View file

@ -15,9 +15,7 @@ package config
import ( import (
"fmt" "fmt"
"net/http"
"regexp" "regexp"
"slices"
"sort" "sort"
"strings" "strings"
@ -129,7 +127,7 @@ func (w BuildStats) Enabled() bool {
} }
func (b BuildConfig) clone() BuildConfig { func (b BuildConfig) clone() BuildConfig {
b.CacheBusters = slices.Clone(b.CacheBusters) b.CacheBusters = append([]CacheBuster{}, b.CacheBusters...)
return b return b
} }
@ -228,22 +226,7 @@ type Server struct {
Redirects []Redirect Redirects []Redirect
compiledHeaders []glob.Glob compiledHeaders []glob.Glob
compiledRedirects []redirect compiledRedirects []glob.Glob
}
type redirect struct {
from glob.Glob
fromRe *regexp.Regexp
headers map[string]glob.Glob
}
func (r redirect) matchHeader(header http.Header) bool {
for k, v := range r.headers {
if !v.Match(header.Get(k)) {
return false
}
}
return true
} }
func (s *Server) CompileConfig(logger loggers.Logger) error { func (s *Server) CompileConfig(logger loggers.Logger) error {
@ -251,41 +234,10 @@ func (s *Server) CompileConfig(logger loggers.Logger) error {
return nil return nil
} }
for _, h := range s.Headers { for _, h := range s.Headers {
g, err := glob.Compile(h.For) s.compiledHeaders = append(s.compiledHeaders, glob.MustCompile(h.For))
if err != nil {
return fmt.Errorf("failed to compile Headers glob %q: %w", h.For, err)
}
s.compiledHeaders = append(s.compiledHeaders, g)
} }
for _, r := range s.Redirects { for _, r := range s.Redirects {
if r.From == "" && r.FromRe == "" { s.compiledRedirects = append(s.compiledRedirects, glob.MustCompile(r.From))
return fmt.Errorf("redirects must have either From or FromRe set")
}
rd := redirect{
headers: make(map[string]glob.Glob),
}
if r.From != "" {
g, err := glob.Compile(r.From)
if err != nil {
return fmt.Errorf("failed to compile Redirect glob %q: %w", r.From, err)
}
rd.from = g
}
if r.FromRe != "" {
re, err := regexp.Compile(r.FromRe)
if err != nil {
return fmt.Errorf("failed to compile Redirect regexp %q: %w", r.FromRe, err)
}
rd.fromRe = re
}
for k, v := range r.FromHeaders {
g, err := glob.Compile(v)
if err != nil {
return fmt.Errorf("failed to compile Redirect header glob %q: %w", v, err)
}
rd.headers[k] = g
}
s.compiledRedirects = append(s.compiledRedirects, rd)
} }
return nil return nil
@ -314,42 +266,22 @@ func (s *Server) MatchHeaders(pattern string) []types.KeyValueStr {
return matches return matches
} }
func (s *Server) MatchRedirect(pattern string, header http.Header) Redirect { func (s *Server) MatchRedirect(pattern string) Redirect {
if s.compiledRedirects == nil { if s.compiledRedirects == nil {
return Redirect{} return Redirect{}
} }
pattern = strings.TrimSuffix(pattern, "index.html") pattern = strings.TrimSuffix(pattern, "index.html")
for i, r := range s.compiledRedirects { for i, g := range s.compiledRedirects {
redir := s.Redirects[i] redir := s.Redirects[i]
var found bool // No redirect to self.
if redir.To == pattern {
if r.from != nil { return Redirect{}
if r.from.Match(pattern) {
found = header == nil || r.matchHeader(header)
// We need to do regexp group replacements if needed.
}
} }
if r.fromRe != nil { if g.Match(pattern) {
m := r.fromRe.FindStringSubmatch(pattern)
if m != nil {
if !found {
found = header == nil || r.matchHeader(header)
}
if found {
// Replace $1, $2 etc. in To.
for i, g := range m[1:] {
redir.To = strings.ReplaceAll(redir.To, fmt.Sprintf("$%d", i+1), g)
}
}
}
}
if found {
return redir return redir
} }
} }
@ -363,23 +295,9 @@ type Headers struct {
} }
type Redirect struct { type Redirect struct {
// From is the Glob pattern to match.
// One of From or FromRe must be set.
From string From string
// FromRe is the regexp to match.
// This regexp can contain group matches (e.g. $1) that can be used in the To field.
// One of From or FromRe must be set.
FromRe string
// To is the target URL.
To string To string
// Headers to match for the redirect.
// This maps the HTTP header name to a Glob pattern with values to match.
// If the map is empty, the redirect will always be triggered.
FromHeaders map[string]string
// HTTP status code to use for the redirect. // HTTP status code to use for the redirect.
// A status code of 200 will trigger a URL rewrite. // A status code of 200 will trigger a URL rewrite.
Status int Status int
@ -451,7 +369,7 @@ func (c *CacheBuster) CompileConfig(logger loggers.Logger) error {
} }
func (r Redirect) IsZero() bool { func (r Redirect) IsZero() bool {
return r.From == "" && r.FromRe == "" return r.From == ""
} }
const ( const (
@ -465,7 +383,17 @@ func DecodeServer(cfg Provider) (Server, error) {
_ = mapstructure.WeakDecode(cfg.GetStringMap("server"), s) _ = mapstructure.WeakDecode(cfg.GetStringMap("server"), s)
for i, redir := range s.Redirects { for i, redir := range s.Redirects {
// Get it in line with the Hugo server for OK responses.
// We currently treat the 404 as a special case, they are always "ugly", so keep them as is.
if redir.Status != 404 {
redir.To = strings.TrimSuffix(redir.To, "index.html") redir.To = strings.TrimSuffix(redir.To, "index.html")
if !strings.HasPrefix(redir.To, "https") && !strings.HasSuffix(redir.To, "/") {
// There are some tricky infinite loop situations when dealing
// when the target does not have a trailing slash.
// This can certainly be handled better, but not time for that now.
return Server{}, fmt.Errorf("unsupported redirect to value %q in server config; currently this must be either a remote destination or a local folder, e.g. \"/blog/\" or \"/blog/index.html\"", redir.To)
}
}
s.Redirects[i] = redir s.Redirects[i] = redir
} }
@ -473,7 +401,7 @@ func DecodeServer(cfg Provider) (Server, error) {
// Set up a default redirect for 404s. // Set up a default redirect for 404s.
s.Redirects = []Redirect{ s.Redirects = []Redirect{
{ {
From: "/**", From: "**",
To: "/404.html", To: "/404.html",
Status: 404, Status: 404,
}, },
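
As a side note, the redirect matching on the master side of this file supports both a glob pattern (From) and a regexp (FromRe), with capture groups substituted into To. Below is a minimal, self-contained sketch of that matching idea, assuming only the gobwas/glob and standard regexp packages; the rule type and helper are simplified stand-ins for the config structs in this diff, and header matching is left out.

package main

import (
	"fmt"
	"regexp"
	"strings"

	"github.com/gobwas/glob"
)

// redirectRule is a simplified stand-in for the Redirect entry above.
type redirectRule struct {
	From   string // glob pattern, e.g. "/b/**"
	FromRe string // regexp pattern, e.g. "/b/(.*)/"
	To     string // target; may reference $1, $2, ... when FromRe matches
	Status int
}

// matchRedirect sketches the matching loop from this file: try the glob,
// then the regexp, and substitute capture groups into To on a regexp hit.
func matchRedirect(rules []redirectRule, pattern string) (redirectRule, bool) {
	pattern = strings.TrimSuffix(pattern, "index.html")
	for _, r := range rules {
		if r.From != "" {
			if g, err := glob.Compile(r.From); err == nil && g.Match(pattern) {
				return r, true
			}
		}
		if r.FromRe != "" {
			re, err := regexp.Compile(r.FromRe)
			if err != nil {
				continue
			}
			if m := re.FindStringSubmatch(pattern); m != nil {
				// Replace $1, $2 etc. in To with the captured groups.
				for i, grp := range m[1:] {
					r.To = strings.ReplaceAll(r.To, fmt.Sprintf("$%d", i+1), grp)
				}
				return r, true
			}
		}
	}
	return redirectRule{}, false
}

func main() {
	rules := []redirectRule{
		{FromRe: "/b/(.*)/", To: "/baz/$1/", Status: 200},
		{From: "/google/**", To: "https://google.com/", Status: 301},
	}
	if r, ok := matchRedirect(rules, "/b/c/"); ok {
		fmt.Println(r.To, r.Status) // prints: /baz/c/ 200
	}
}

In the real config the globs and regexps are compiled once in CompileConfig and reused; compiling inside the loop here only keeps the sketch short.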


@ -71,28 +71,7 @@ X-Content-Type-Options = "nosniff"
[[server.redirects]] [[server.redirects]]
from = "/foo/**" from = "/foo/**"
to = "/baz/index.html" to = "/foo/index.html"
status = 200
[[server.redirects]]
from = "/loop/**"
to = "/loop/foo/"
status = 200
[[server.redirects]]
from = "/b/**"
fromRe = "/b/(.*)/"
to = "/baz/$1/"
status = 200
[[server.redirects]]
fromRe = "/c/(.*)/"
to = "/boo/$1/"
status = 200
[[server.redirects]]
fromRe = "/d/(.*)/"
to = "/boo/$1/"
status = 200 status = 200
[[server.redirects]] [[server.redirects]]
@ -100,6 +79,11 @@ from = "/google/**"
to = "https://google.com/" to = "https://google.com/"
status = 301 status = 301
[[server.redirects]]
from = "/**"
to = "/default/index.html"
status = 301
`, "toml") `, "toml")
@ -116,35 +100,45 @@ status = 301
{Key: "X-XSS-Protection", Value: "1; mode=block"}, {Key: "X-XSS-Protection", Value: "1; mode=block"},
}) })
c.Assert(s.MatchRedirect("/foo/bar/baz", nil), qt.DeepEquals, Redirect{ c.Assert(s.MatchRedirect("/foo/bar/baz"), qt.DeepEquals, Redirect{
From: "/foo/**", From: "/foo/**",
To: "/baz/", To: "/foo/",
Status: 200, Status: 200,
}) })
c.Assert(s.MatchRedirect("/foo/bar/", nil), qt.DeepEquals, Redirect{ c.Assert(s.MatchRedirect("/someother"), qt.DeepEquals, Redirect{
From: "/foo/**", From: "/**",
To: "/baz/", To: "/default/",
Status: 200, Status: 301,
}) })
c.Assert(s.MatchRedirect("/b/c/", nil), qt.DeepEquals, Redirect{ c.Assert(s.MatchRedirect("/google/foo"), qt.DeepEquals, Redirect{
From: "/b/**",
FromRe: "/b/(.*)/",
To: "/baz/c/",
Status: 200,
})
c.Assert(s.MatchRedirect("/c/d/", nil).To, qt.Equals, "/boo/d/")
c.Assert(s.MatchRedirect("/c/d/e/", nil).To, qt.Equals, "/boo/d/e/")
c.Assert(s.MatchRedirect("/someother", nil), qt.DeepEquals, Redirect{})
c.Assert(s.MatchRedirect("/google/foo", nil), qt.DeepEquals, Redirect{
From: "/google/**", From: "/google/**",
To: "https://google.com/", To: "https://google.com/",
Status: 301, Status: 301,
}) })
// No redirect loop, please.
c.Assert(s.MatchRedirect("/default/index.html"), qt.DeepEquals, Redirect{})
c.Assert(s.MatchRedirect("/default/"), qt.DeepEquals, Redirect{})
for _, errorCase := range []string{
`[[server.redirects]]
from = "/**"
to = "/file"
status = 301`,
`[[server.redirects]]
from = "/**"
to = "/foo/file.html"
status = 301`,
} {
cfg, err := FromConfigString(errorCase, "toml")
c.Assert(err, qt.IsNil)
_, err = DecodeServer(cfg)
c.Assert(err, qt.Not(qt.IsNil))
}
} }
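
The nil header arguments in the master-side assertions above mean the header-conditioned redirects (FromHeaders) are not exercised by this test. Here is a small sketch of how the matchHeader helper near the top of this section behaves; the header name and values below are made up for illustration.

package main

import (
	"fmt"
	"net/http"

	"github.com/gobwas/glob"
)

// matchHeader mirrors the helper at the top of this section: every
// configured header glob must match the corresponding request header.
func matchHeader(globs map[string]glob.Glob, header http.Header) bool {
	for k, v := range globs {
		if !v.Match(header.Get(k)) {
			return false
		}
	}
	return true
}

func main() {
	// Hypothetical rule: only apply a redirect to HTMX requests.
	globs := map[string]glob.Glob{
		"HX-Request": glob.MustCompile("true"),
	}

	h := http.Header{}
	h.Set("HX-Request", "true")
	fmt.Println(matchHeader(globs, h)) // true

	h.Set("HX-Request", "false")
	fmt.Println(matchHeader(globs, h)) // false
}
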
func TestBuildConfigCacheBusters(t *testing.T) { func TestBuildConfigCacheBusters(t *testing.T) {
@ -166,7 +160,7 @@ func TestBuildConfigCacheBusters(t *testing.T) {
func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) { func TestBuildConfigCacheBusterstTailwindSetup(t *testing.T) {
c := qt.New(t) c := qt.New(t)
cfg := New() cfg := New()
cfg.Set("build", map[string]any{ cfg.Set("build", map[string]interface{}{
"cacheBusters": []map[string]string{ "cacheBusters": []map[string]string{
{ {
"source": "assets/watching/hugo_stats\\.json", "source": "assets/watching/hugo_stats\\.json",


@ -58,7 +58,7 @@ type AllProvider interface {
BuildDrafts() bool BuildDrafts() bool
Running() bool Running() bool
Watching() bool Watching() bool
NewIdentityManager(name string, opts ...identity.ManagerOption) identity.Manager NewIdentityManager(name string) identity.Manager
FastRenderMode() bool FastRenderMode() bool
PrintUnusedTemplates() bool PrintUnusedTemplates() bool
EnableMissingTranslationPlaceholders() bool EnableMissingTranslationPlaceholders() bool
@ -76,7 +76,7 @@ type AllProvider interface {
} }
// We cannot import the media package as that would create a circular dependency. // We cannot import the media package as that would create a circular dependency.
// This interface defines a subset of what media.ContentTypes provides. // This interface defineds a sub set of what media.ContentTypes provides.
type ContentTypesProvider interface { type ContentTypesProvider interface {
IsContentSuffix(suffix string) bool IsContentSuffix(suffix string) bool
IsContentFile(filename string) bool IsContentFile(filename string) bool


@ -15,6 +15,7 @@ package config
import ( import (
"fmt" "fmt"
"sort"
"strings" "strings"
"sync" "sync"
@ -25,6 +26,42 @@ import (
"github.com/gohugoio/hugo/common/maps" "github.com/gohugoio/hugo/common/maps"
) )
var (
// ConfigRootKeysSet contains all of the config map root keys.
ConfigRootKeysSet = map[string]bool{
"build": true,
"caches": true,
"cascade": true,
"frontmatter": true,
"languages": true,
"imaging": true,
"markup": true,
"mediatypes": true,
"menus": true,
"minify": true,
"module": true,
"outputformats": true,
"params": true,
"permalinks": true,
"related": true,
"sitemap": true,
"privacy": true,
"security": true,
"taxonomies": true,
}
// ConfigRootKeys is a sorted version of ConfigRootKeysSet.
ConfigRootKeys []string
)
func init() {
for k := range ConfigRootKeysSet {
ConfigRootKeys = append(ConfigRootKeys, k)
}
sort.Strings(ConfigRootKeys)
}
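
A tiny sketch of one way the sorted root-key list above can be used, for example to check whether the first segment of a dotted key names a top-level config section. The isRootKey helper below is hypothetical, not part of this package.

package main

import (
	"fmt"
	"sort"
	"strings"
)

// rootKeys stands in for the sorted ConfigRootKeys slice built in init()
// above, trimmed to a few entries for the example.
var rootKeys = []string{"build", "markup", "menus", "params", "taxonomies"}

// isRootKey reports whether the first segment of a dotted config key
// is one of the known root keys (binary search over the sorted slice).
func isRootKey(key string) bool {
	first := strings.SplitN(strings.ToLower(key), ".", 2)[0]
	i := sort.SearchStrings(rootKeys, first)
	return i < len(rootKeys) && rootKeys[i] == first
}

func main() {
	fmt.Println(isRootKey("params.mainSections")) // true
	fmt.Println(isRootKey("title"))               // false
}
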
// New creates a Provider backed by an empty maps.Params. // New creates a Provider backed by an empty maps.Params.
func New() Provider { func New() Provider {
return &defaultConfigProvider{ return &defaultConfigProvider{
@ -345,7 +382,7 @@ func (c *defaultConfigProvider) getNestedKeyAndMap(key string, create bool) (str
c.keyCache.Store(key, parts) c.keyCache.Store(key, parts)
} }
current := c.root current := c.root
for i := range len(parts) - 1 { for i := 0; i < len(parts)-1; i++ {
next, found := current[parts[i]] next, found := current[parts[i]]
if !found { if !found {
if create { if create {


@ -332,7 +332,7 @@ func TestDefaultConfigProvider(t *testing.T) {
return nil return nil
} }
for i := range 20 { for i := 0; i < 20; i++ {
i := i i := i
r.Run(func() error { r.Run(func() error {
const v = 42 const v = 42


@ -22,7 +22,7 @@ import (
func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) { func DecodeNamespace[S, C any](configSource any, buildConfig func(any) (C, any, error)) (*ConfigNamespace[S, C], error) {
// Calculate the hash of the input (not including any defaults applied later). // Calculate the hash of the input (not including any defaults applied later).
// This allows us to introduce new config options without breaking the hash. // This allows us to introduce new config options without breaking the hash.
h := hashing.HashStringHex(configSource) h := hashing.HashString(configSource)
// Build the config // Build the config
c, ext, err := buildConfig(configSource) c, ext, err := buildConfig(configSource)


@ -29,7 +29,7 @@ func TestNamespace(t *testing.T) {
// ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig) // ns, err := config.DecodeNamespace[map[string]DocsMediaTypeConfig](in, defaultMediaTypesConfig, buildConfig)
ns, err := DecodeNamespace[[]*tstNsExt]( ns, err := DecodeNamespace[[]*tstNsExt](
map[string]any{"foo": "bar"}, map[string]interface{}{"foo": "bar"},
func(v any) (*tstNsExt, any, error) { func(v any) (*tstNsExt, any, error) {
t := &tstNsExt{} t := &tstNsExt{}
m, err := maps.ToStringMapE(v) m, err := maps.ToStringMapE(v)
@ -42,8 +42,8 @@ func TestNamespace(t *testing.T) {
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)
c.Assert(ns, qt.Not(qt.IsNil)) c.Assert(ns, qt.Not(qt.IsNil))
c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]any{"foo": "bar"}) c.Assert(ns.SourceStructure, qt.DeepEquals, map[string]interface{}{"foo": "bar"})
c.Assert(ns.SourceHash, qt.Equals, "1420f6c7782f7459") c.Assert(ns.SourceHash, qt.Equals, "1450430416588600409")
c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"}) c.Assert(ns.Config, qt.DeepEquals, &tstNsExt{Foo: "bar"})
c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil)) c.Assert(ns.Signature(), qt.DeepEquals, []*tstNsExt(nil))
} }
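
The comment in DecodeNamespace above notes that the hash is taken over the raw config source, before any defaults are applied, so that introducing new default options does not change the hash. A minimal sketch of that idea, using encoding/json plus FNV from the standard library as a stand-in for Hugo's hashing helpers:

package main

import (
	"encoding/json"
	"fmt"
	"hash/fnv"
)

// hashConfigSource returns a hex digest of the raw, user-provided config
// input. encoding/json sorts map keys, so equal maps hash equally.
func hashConfigSource(v any) (string, error) {
	b, err := json.Marshal(v)
	if err != nil {
		return "", err
	}
	h := fnv.New64a()
	h.Write(b)
	return fmt.Sprintf("%x", h.Sum64()), nil
}

func main() {
	src := map[string]any{"foo": "bar"}

	hash, err := hashConfigSource(src)
	if err != nil {
		panic(err)
	}
	// The digest depends only on the source map, not on any defaults
	// applied later when the final config struct is built.
	fmt.Println(hash)
}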


@ -30,10 +30,9 @@ type Config struct {
Disqus Disqus Disqus Disqus
GoogleAnalytics GoogleAnalytics GoogleAnalytics GoogleAnalytics
Instagram Instagram Instagram Instagram
Twitter Twitter // deprecated in favor of X in v0.141.0 Twitter Twitter
Vimeo Vimeo Vimeo Vimeo
YouTube YouTube YouTube YouTube
X X
} }
// Disqus holds the privacy configuration settings related to the Disqus template. // Disqus holds the privacy configuration settings related to the Disqus template.
@ -59,8 +58,7 @@ type Instagram struct {
Simple bool Simple bool
} }
// Twitter holds the privacy configuration settings related to the Twitter shortcode. // Twitter holds the privacy configuration settingsrelated to the Twitter shortcode.
// Deprecated in favor of X in v0.141.0.
type Twitter struct { type Twitter struct {
Service `mapstructure:",squash"` Service `mapstructure:",squash"`
@ -72,7 +70,7 @@ type Twitter struct {
Simple bool Simple bool
} }
// Vimeo holds the privacy configuration settings related to the Vimeo shortcode. // Vimeo holds the privacy configuration settingsrelated to the Vimeo shortcode.
type Vimeo struct { type Vimeo struct {
Service `mapstructure:",squash"` Service `mapstructure:",squash"`
@ -86,7 +84,7 @@ type Vimeo struct {
Simple bool Simple bool
} }
// YouTube holds the privacy configuration settings related to the YouTube shortcode. // YouTube holds the privacy configuration settingsrelated to the YouTube shortcode.
type YouTube struct { type YouTube struct {
Service `mapstructure:",squash"` Service `mapstructure:",squash"`
@ -96,20 +94,6 @@ type YouTube struct {
PrivacyEnhanced bool PrivacyEnhanced bool
} }
// X holds the privacy configuration settings related to the X shortcode.
type X struct {
Service `mapstructure:",squash"`
// When set to true, the X post and its embedded page on your site are not
// used for purposes that include personalized suggestions and personalized
// ads.
EnableDNT bool
// If simple mode is enabled, a static and no-JS version of the X post will
// be built.
Simple bool
}
// DecodeConfig creates a privacy Config from a given Hugo configuration. // DecodeConfig creates a privacy Config from a given Hugo configuration.
func DecodeConfig(cfg config.Provider) (pc Config, err error) { func DecodeConfig(cfg config.Provider) (pc Config, err error) {
if !cfg.IsSet(privacyConfigKey) { if !cfg.IsSet(privacyConfigKey) {


@ -36,7 +36,7 @@ respectDoNotTrack = true
[privacy.instagram] [privacy.instagram]
disable = true disable = true
simple = true simple = true
[privacy.x] [privacy.twitter]
disable = true disable = true
enableDNT = true enableDNT = true
simple = true simple = true
@ -59,10 +59,9 @@ simple = true
got := []bool{ got := []bool{
pc.Disqus.Disable, pc.GoogleAnalytics.Disable, pc.Disqus.Disable, pc.GoogleAnalytics.Disable,
pc.GoogleAnalytics.RespectDoNotTrack, pc.Instagram.Disable, pc.GoogleAnalytics.RespectDoNotTrack, pc.Instagram.Disable,
pc.Instagram.Simple, pc.Instagram.Simple, pc.Twitter.Disable, pc.Twitter.EnableDNT,
pc.Vimeo.Disable, pc.Vimeo.EnableDNT, pc.Vimeo.Simple, pc.Twitter.Simple, pc.Vimeo.Disable, pc.Vimeo.EnableDNT, pc.Vimeo.Simple,
pc.YouTube.PrivacyEnhanced, pc.YouTube.Disable, pc.X.Disable, pc.X.EnableDNT, pc.YouTube.PrivacyEnhanced, pc.YouTube.Disable,
pc.X.Simple,
} }
c.Assert(got, qt.All(qt.Equals), true) c.Assert(got, qt.All(qt.Equals), true)
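
For context, the [privacy.x] table in the test config above decodes into the X struct via mapstructure, the same way the other privacy sections do. A rough sketch with a trimmed-down struct (field names taken from the diff; the real decoding goes through Hugo's config provider):

package main

import (
	"fmt"

	"github.com/mitchellh/mapstructure"
)

// xPrivacy is a trimmed-down copy of the X privacy settings shown above.
type xPrivacy struct {
	Disable   bool
	EnableDNT bool
	Simple    bool
}

func main() {
	// Roughly what the [privacy.x] TOML table above decodes to.
	in := map[string]any{
		"disable":   true,
		"enableDNT": true,
		"simple":    true,
	}

	var out xPrivacy
	if err := mapstructure.WeakDecode(in, &out); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", out) // {Disable:true EnableDNT:true Simple:true}
}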


@ -73,7 +73,7 @@ func NewWhitelist(patterns ...string) (Whitelist, error) {
var patternsr []*regexp.Regexp var patternsr []*regexp.Regexp
for i := range patterns { for i := 0; i < len(patterns); i++ {
p := strings.TrimSpace(patterns[i]) p := strings.TrimSpace(patterns[i])
if p == "" { if p == "" {
continue continue


@ -31,8 +31,7 @@ type Config struct {
Disqus Disqus Disqus Disqus
GoogleAnalytics GoogleAnalytics GoogleAnalytics GoogleAnalytics
Instagram Instagram Instagram Instagram
Twitter Twitter // deprecated in favor of X in v0.141.0 Twitter Twitter
X X
RSS RSS RSS RSS
} }
@ -62,7 +61,6 @@ type Instagram struct {
} }
// Twitter holds the functional configuration settings related to the Twitter shortcodes. // Twitter holds the functional configuration settings related to the Twitter shortcodes.
// Deprecated in favor of X in v0.141.0.
type Twitter struct { type Twitter struct {
// The Simple variant of Twitter is decorated with a basic set of inline styles. // The Simple variant of Twitter is decorated with a basic set of inline styles.
// This means that if you want to provide your own CSS, you want // This means that if you want to provide your own CSS, you want
@ -70,14 +68,6 @@ type Twitter struct {
DisableInlineCSS bool DisableInlineCSS bool
} }
// X holds the functional configuration settings related to the X shortcodes.
type X struct {
// The Simple variant of X is decorated with a basic set of inline styles.
// This means that if you want to provide your own CSS, you want
// to disable the inline CSS provided by Hugo.
DisableInlineCSS bool
}
// RSS holds the functional configuration settings related to the RSS feeds. // RSS holds the functional configuration settings related to the RSS feeds.
type RSS struct { type RSS struct {
// Limit the number of pages. // Limit the number of pages.
@ -101,9 +91,6 @@ func DecodeConfig(cfg config.Provider) (c Config, err error) {
if c.RSS.Limit == 0 { if c.RSS.Limit == 0 {
c.RSS.Limit = cfg.GetInt(rssLimitKey) c.RSS.Limit = cfg.GetInt(rssLimitKey)
if c.RSS.Limit == 0 {
c.RSS.Limit = -1
}
} }
return return
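
And a tiny sketch of the RSS limit fallback in DecodeConfig above: a zero limit falls back to the site-wide rssLimit value, and on the master side of this diff a still-zero result is normalized to -1, which is treated as "no limit". The getGlobalLimit callback below stands in for cfg.GetInt(rssLimitKey).

package main

import "fmt"

// resolveRSSLimit mirrors the fallback chain in DecodeConfig above.
func resolveRSSLimit(configured int, getGlobalLimit func() int) int {
	if configured == 0 {
		configured = getGlobalLimit()
		if configured == 0 {
			// -1 is treated as "no limit".
			configured = -1
		}
	}
	return configured
}

func main() {
	global := func() int { return 0 }
	fmt.Println(resolveRSSLimit(0, global))  // -1
	fmt.Println(resolveRSSLimit(20, global)) // 20
}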


@ -36,8 +36,6 @@ id = "ga_id"
disableInlineCSS = true disableInlineCSS = true
[services.twitter] [services.twitter]
disableInlineCSS = true disableInlineCSS = true
[services.x]
disableInlineCSS = true
` `
cfg, err := config.FromConfigString(tomlConfig, "toml") cfg, err := config.FromConfigString(tomlConfig, "toml")
c.Assert(err, qt.IsNil) c.Assert(err, qt.IsNil)

Some files were not shown because too many files have changed in this diff.