Mirror of https://github.com/gohugoio/hugo.git (synced 2025-07-04 07:57:34 +00:00)

Compare commits

No commits in common. "master" and "v0.63.2" have entirely different histories.

3434 changed files with 103606 additions and 190546 deletions
@@ -1,115 +1,51 @@
parameters:
# v2: 11m.
defaults: &defaults
resource_class: large
docker:
- image: bepsays/ci-hugoreleaser:1.22400.20000
environment: &buildenv
GOMODCACHE: /root/project/gomodcache
- image: bepsays/ci-goreleaser:1.13-4
environment:
CGO_ENABLED: "0"

version: 2
jobs:
prepare_release:
build:
<<: *defaults
environment: &buildenv
GOMODCACHE: /root/project/gomodcache
steps:
- setup_remote_docker
- checkout:
path: hugo
- &git-config
run:
command: |
git config --global user.email "bjorn.erik.pedersen+hugoreleaser@gmail.com"
git config --global user.name "hugoreleaser"
- run:
command: |
git clone git@github.com:gohugoio/hugoDocs.git
cd hugo
go mod download
go run -tags release main.go release --step 1
- save_cache:
key: git-sha-{{ .Revision }}
paths:
- hugo
- gomodcache
build_container1:
<<: [*defaults]
environment:
<<: [*buildenv]
sleep 5
go test -p 1 ./...
- persist_to_workspace:
root: .
paths: .
release:
<<: *defaults
steps:
- &restore-cache
restore_cache:
key: git-sha-{{ .Revision }}
- run:
no_output_timeout: 20m
command: |
mkdir -p /tmp/files/dist1
cd hugo
hugoreleaser build -paths "builds/container1/**" -workers 3 -dist /tmp/files/dist1 -chunks $CIRCLE_NODE_TOTAL -chunk-index $CIRCLE_NODE_INDEX
- &persist-workspace
persist_to_workspace:
root: /tmp/files
paths:
- dist1
- dist2
parallelism: 7
build_container2:
<<: [*defaults]
environment:
<<: [*buildenv]
docker:
- image: bepsays/ci-hugoreleaser-linux-arm64:1.22400.20000
steps:
- *restore-cache
- &attach-workspace
attach_workspace:
at: /tmp/workspace
- run:
command: |
mkdir -p /tmp/files/dist2
cd hugo
hugoreleaser build -paths "builds/container2/**" -workers 1 -dist /tmp/files/dist2
- *persist-workspace
archive_and_release:
<<: [*defaults]
environment:
<<: [*buildenv]
steps:
- *restore-cache
- *attach-workspace
- *git-config
- run:
name: Add github.com to known hosts
command: ssh-keyscan github.com >> ~/.ssh/known_hosts
- run:
command: |
cp -a /tmp/workspace/dist1/. ./hugo/dist
cp -a /tmp/workspace/dist2/. ./hugo/dist
- attach_workspace:
at: /root/project
- run:
command: |
cd hugo
hugoreleaser archive
hugoreleaser release
go run -tags release main.go release --step 2
git config --global user.email "bjorn.erik.pedersen+hugoreleaser@gmail.com"
git config --global user.name "hugoreleaser"
go run -tags release main.go release -r ${CIRCLE_BRANCH}

workflows:
version: 2
release:
jobs:
- prepare_release:
- build:
filters:
branches:
only: /release-.*/
- build_container1:
- hold:
type: approval
requires:
- prepare_release
- build_container2:
requires:
- prepare_release
- archive_and_release:
- build
- release:
context: org-global
requires:
- build_container1
- build_container2

- hold

7 .github/ISSUE_TEMPLATE/bug_report.md (vendored)
@@ -1,15 +1,10 @@
---
name: 'Bug report'
labels: 'Bug, NeedsTriage'
labels: ''
assignees: ''
about: Create a report to help us improve
---

<!--
Please do not use the issue queue for questions or troubleshooting. Unless you are certain that your issue is a software defect, use the forum:

https://discourse.gohugo.io
-->

<!-- Please answer these questions before submitting your issue. Thanks! -->

5 .github/ISSUE_TEMPLATE/config.yml (vendored)
@@ -1,5 +0,0 @@
blank_issues_enabled: false
contact_links:
- name: SUPPORT, ISSUES and TROUBLESHOOTING
url: https://discourse.gohugo.io/
about: Please DO NOT use Github for support requests. Please visit https://discourse.gohugo.io for support! You will be helped much faster there. If you ignore this request your issue might be closed with a discourse label.

7 .github/ISSUE_TEMPLATE/feature_request.md (vendored)
@@ -1,11 +1,8 @@
---
name: Proposal
about: Propose a new feature for Hugo
about: Suggest an idea for Hugo
title: ''
labels: 'Proposal, NeedsTriage'
labels: 'Proposal'
assignees: ''

---

<!-- Describe this new feature. Think about if it really belongs in the Hugo core module; you may want to discuss it on https://discourse.gohugo.io/ first. -->

10 .github/ISSUE_TEMPLATE/support.md (vendored, normal file)
@@ -0,0 +1,10 @@
---
name: Support (Do not use)
about: Please do not use Github for support requests. Visit https://discourse.gohugo.io for support
title: ''
labels: support
assignees: ''

---

Issues created with this template will be automatically closed. Please visit https://discourse.gohugo.io for the support you really, really, want!

7 .github/dependabot.yml (vendored)
@@ -1,7 +0,0 @@
# See https://docs.github.com/en/github/administering-a-repository/configuration-options-for-dependency-updates#package-ecosystem
version: 2
updates:
- package-ecosystem: "gomod"
directory: "/"
schedule:
interval: "daily"

23 .github/stale.yml (vendored, normal file)
@@ -0,0 +1,23 @@
# Number of days of inactivity before an issue becomes stale
daysUntilStale: 120
# Number of days of inactivity before a stale issue is closed
daysUntilClose: 30
# Issues with these labels will never be considered stale
exemptLabels:
- Keep
- Security
# Label to use when marking an issue as stale
staleLabel: Stale
# Comment to post when marking an issue as stale. Set to `false` to disable
markComment: >
This issue has been automatically marked as stale because it has not had
recent activity. The resources of the Hugo team are limited, and so we are asking for your help.

If this is a **bug** and you can still reproduce this error on the <code>master</code> branch, please reply with all of the information you have about it in order to keep the issue open.

If this is a **feature request**, and you feel that it is still relevant and valuable, please tell us why.

This issue will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.

# Comment to post when closing a stale issue. Set to `false` to disable
closeComment: false

14 .github/workflows/auto_close_support.yml (vendored, normal file)
@@ -0,0 +1,14 @@
on:
schedule:
- cron: 0 5 * * 3
name: Weekly Issue Closure
jobs:
cycle-weekly-close:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@master
- name: weekly-issue-closure
uses: bdougie/close-issues-based-on-label@master
env:
LABEL: support
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

49 .github/workflows/image.yml (vendored)
@@ -1,49 +0,0 @@
name: Build Docker image

on:
release:
types: [published]
pull_request:
permissions:
packages: write

env:
REGISTRY_IMAGE: ghcr.io/gohugoio/hugo

jobs:
build:
runs-on: ubuntu-latest

steps:
- name: Checkout
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7

- name: Docker meta
id: meta
uses: docker/metadata-action@8e5442c4ef9f78752691e2d8f8d19755c6f78e81 # v5.5.1
with:
images: ${{ env.REGISTRY_IMAGE }}

- name: Set up Docker Buildx
uses: docker/setup-buildx-action@988b5a0280414f521da01fcc63a27aeeb4b104db # v3.6.1

- name: Login to GHCR
# Login is only needed when the image is pushed
uses: docker/login-action@9780b0c442fbb1117ed29e0efdff1e18412f7567 # v3.3.0
with:
registry: ghcr.io
username: ${{ github.repository_owner }}
password: ${{ secrets.GITHUB_TOKEN }}

- name: Build and push
id: build
uses: docker/build-push-action@16ebe778df0e7752d2cfcbd924afdbbd89c1a755 # v6.6.1
with:
context: .
provenance: mode=max
sbom: true
push: ${{ github.event_name != 'pull_request' }}
platforms: linux/amd64,linux/arm64
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
build-args: HUGO_BUILD_TAGS=extended,withdeploy

52 .github/workflows/stale.yml (vendored)
@@ -1,52 +0,0 @@
name: 'Close stale and lock closed issues and PRs'
on:
workflow_dispatch:
schedule:
- cron: '30 1 * * *'
permissions:
contents: read
jobs:
stale:
permissions:
issues: write
pull-requests: write
runs-on: ubuntu-latest
steps:
- uses: dessant/lock-threads@7de207be1d3ce97a9abe6ff1306222982d1ca9f9 # v5.0.1
with:
issue-inactive-days: 21
add-issue-labels: 'Outdated'
issue-comment: >
This issue has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
pr-comment: >
This pull request has been automatically locked since there
has not been any recent activity after it was closed.
Please open a new issue for related bugs.
- uses: actions/stale@28ca1036281a5e5922ead5184a1bbf96e5fc984e # v9.0.0
with:
operations-per-run: 999
days-before-issue-stale: 365
days-before-pr-stale: 365
days-before-issue-close: 56
days-before-pr-close: 56
stale-issue-message: >
This issue has been automatically marked as stale because it has not had
recent activity. The resources of the Hugo team are limited, and so we are asking for your help.

If this is a **bug** and you can still reproduce this error on the <code>master</code> branch, please reply with all of the information you have about it in order to keep the issue open.

If this is a **feature request**, and you feel that it is still relevant and valuable, please tell us why.

This issue will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
stale-pr-message: This PR has been automatically marked as stale because it has not had
recent activity. The resources of the Hugo team are limited, and so we are asking for your help.

Please check https://github.com/gohugoio/hugo/blob/master/CONTRIBUTING.md#code-contribution and verify that this code contribution fits with the description. If yes, tell us in a comment.

This PR will automatically be closed in the near future if no further activity occurs. Thank you for all your contributions.
stale-issue-label: 'Stale'
exempt-issue-labels: 'Keep,Security'
stale-pr-label: 'Stale'
exempt-pr-labels: 'Keep,Security'

132 .github/workflows/test.yml (vendored)
@@ -1,132 +0,0 @@
on:
push:
branches: [master]
pull_request:
name: Test
env:
GOPROXY: https://proxy.golang.org
GO111MODULE: on
SASS_VERSION: 1.80.3
DART_SASS_SHA_LINUX: 7c933edbad0a7d389192c5b79393485c088bd2c4398e32f5754c32af006a9ffd
DART_SASS_SHA_MACOS: 79e060b0e131c3bb3c16926bafc371dc33feab122bfa8c01aa337a072097967b
DART_SASS_SHA_WINDOWS: 0bc4708b37cd1bac4740e83ac5e3176e66b774f77fd5dd364da5b5cfc9bfb469
permissions:
contents: read
jobs:
test:
strategy:
matrix:
go-version: [1.23.x, 1.24.x]
os: [ubuntu-latest, windows-latest] # macos disabled for now because of disk space issues.
runs-on: ${{ matrix.os }}
steps:
- if: matrix.os == 'ubuntu-latest'
name: Free Disk Space (Ubuntu)
uses: jlumbroso/free-disk-space@54081f138730dfa15788a46383842cd2f914a1be # v1.3.1
with:
# this might remove tools that are actually needed,
# if set to "true" but frees about 6 GB
tool-cache: false
android: true
dotnet: true
haskell: true
large-packages: true
docker-images: true
swap-storage: true
- name: Checkout code
uses: actions/checkout@692973e3d937129bcbf40652eb9f2f61becf3332 # v4.1.7
- name: Install Go
uses: actions/setup-go@0a12ed9d6a96ab950c8f026ed9f722fe0da7ef32 # v5.0.2
with:
go-version: ${{ matrix.go-version }}
check-latest: true
cache: true
cache-dependency-path: |
**/go.sum
**/go.mod
- name: Install Ruby
uses: ruby/setup-ruby@a6e6f86333f0a2523ece813039b8b4be04560854 # v1.190.0
with:
ruby-version: "2.7"
bundler-cache: true #
- name: Install Python
uses: actions/setup-python@39cd14951b08e74b54015e9e001cdefcf80e669f # v5.1.1
with:
python-version: "3.x"
- name: Install Mage
run: go install github.com/magefile/mage@v1.15.0
- name: Install asciidoctor
uses: reitzig/actions-asciidoctor@c642db5eedd1d729bb8c92034770d0b2f769eda6 # v2.0.2
- name: Install docutils
run: |
pip install docutils
rst2html --version
- if: matrix.os == 'ubuntu-latest'
name: Install pandoc on Linux
run: |
sudo apt-get update -y
sudo apt-get install -y pandoc
- if: matrix.os == 'macos-latest'
run: |
brew install pandoc
- if: matrix.os == 'windows-latest'
run: |
choco install pandoc
- run: pandoc -v
- if: matrix.os == 'windows-latest'
run: |
choco install mingw
- if: matrix.os == 'ubuntu-latest'
name: Install dart-sass Linux
run: |
echo "Install Dart Sass version ${SASS_VERSION} ..."
curl -LJO "https://github.com/sass/dart-sass/releases/download/${SASS_VERSION}/dart-sass-${SASS_VERSION}-linux-x64.tar.gz";
echo "${DART_SASS_SHA_LINUX} dart-sass-${SASS_VERSION}-linux-x64.tar.gz" | sha256sum -c;
tar -xvf "dart-sass-${SASS_VERSION}-linux-x64.tar.gz";
echo "$GOBIN"
echo "$GITHUB_WORKSPACE/dart-sass/" >> $GITHUB_PATH
- if: matrix.os == 'macos-latest'
name: Install dart-sass MacOS
run: |
echo "Install Dart Sass version ${SASS_VERSION} ..."
curl -LJO "https://github.com/sass/dart-sass/releases/download/${SASS_VERSION}/dart-sass-${SASS_VERSION}-macos-x64.tar.gz";
echo "${DART_SASS_SHA_MACOS} dart-sass-${SASS_VERSION}-macos-x64.tar.gz" | shasum -a 256 -c;
tar -xvf "dart-sass-${SASS_VERSION}-macos-x64.tar.gz";
echo "$GITHUB_WORKSPACE/dart-sass/" >> $GITHUB_PATH
- if: matrix.os == 'windows-latest'
name: Install dart-sass Windows
run: |
echo "Install Dart Sass version ${env:SASS_VERSION} ..."
curl -LJO "https://github.com/sass/dart-sass/releases/download/${env:SASS_VERSION}/dart-sass-${env:SASS_VERSION}-windows-x64.zip";
Expand-Archive -Path "dart-sass-${env:SASS_VERSION}-windows-x64.zip" -DestinationPath .;
echo "$env:GITHUB_WORKSPACE/dart-sass/" | Out-File -FilePath $Env:GITHUB_PATH -Encoding utf-8 -Append
- if: matrix.os == 'ubuntu-latest'
name: Install staticcheck
run: go install honnef.co/go/tools/cmd/staticcheck@latest
- if: matrix.os == 'ubuntu-latest'
name: Run staticcheck
run: staticcheck ./...
- if: matrix.os != 'windows-latest'
name: Check
run: |
sass --version;
mage -v check;
env:
HUGO_BUILD_TAGS: extended,withdeploy
- if: matrix.os == 'windows-latest'
# See issue #11052. We limit the build to regular test (no -race flag) on Windows for now.
name: Test
run: |
mage -v test;
env:
HUGO_BUILD_TAGS: extended,withdeploy
- name: Build tags
run: |
go install -tags extended
- if: matrix.os == 'ubuntu-latest'
name: Build for dragonfly
run: |
go install
env:
GOARCH: amd64
GOOS: dragonfly
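The workflow above drives the whole test matrix; a rough local equivalent of its "Check" step, under the assumption that Go, GCC, and a Dart Sass binary are already on PATH (the workflow installs Dart Sass separately), is:

```bash
# Sketch of running the workflow's main check locally (assumptions: Go, GCC and
# dart-sass are already installed; the Mage version mirrors the workflow above).
go install github.com/magefile/mage@v1.15.0
HUGO_BUILD_TAGS=extended,withdeploy mage -v check
```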

28 .gitignore (vendored)
@@ -1,6 +1,26 @@

/hugo
docs/public*
/.idea
hugo.exe
*.test
imports.*
dist/
public/
*.prof
nohup.out
cover.out
*.swp
*.swo
.DS_Store
*~
vendor/*/
*.bench
*.debug
coverage*.out

dock.sh

GoBuilds
dist

resources/sunset.jpg

vendor

77 .travis.yml (normal file)
@@ -0,0 +1,77 @@
language: go

dist: bionic

env:
global:
- CACHE_NAME=${TRAVIS_ARCH}
- GO111MODULE=on
- GOPROXY=https://proxy.golang.org
- HUGO_BUILD_TAGS=extended

git:
depth: false

go:
- "1.12.13"
- "1.13.4"
- master

arch:
- amd64
- arm64

os:
- linux
- osx
- windows

jobs:
allow_failures:
- go: master
- arch: arm64
fast_finish: true
exclude:
- os: windows
go: master
- arch: arm64
os: osx
- arch: arm64
os: windows

cache:
directories:
- $HOME/gopath/pkg/mod
- $HOME/.cache/go-build
- $HOME/Library/Caches/go-build
- $HOME/AppData/Local/go-build

before_install:
- df -h
# https://travis-ci.community/t/go-cant-find-gcc-with-go1-11-1-on-windows/293/5
- if [ "$TRAVIS_OS_NAME" = "windows" ]; then
choco install mingw -y;
export PATH=/c/tools/mingw64/bin:"$PATH";
fi
- gem install asciidoctor
- type asciidoctor

install:
- mkdir -p $HOME/src
- mv $TRAVIS_BUILD_DIR $HOME/src
- export TRAVIS_BUILD_DIR=$HOME/src/hugo
- cd $HOME/src/hugo
- go get github.com/magefile/mage

script:
- go mod download || true
- mage -v test
- if [ "$TRAVIS_ARCH" = "amd64" ]; then
mage -v check;
else
HUGO_TIMEOUT=30000 mage -v check;
fi
- mage -v hugo
- ./hugo -s docs/
- ./hugo --renderToMemory -s docs/
- df -h

@@ -1,5 +1,3 @@
>**Note:** We would appreciate if you hold on with any big refactoring (like renaming deprecated Go packages), mainly because of the potential for extra merge work for changes coming in the near future.

# Contributing to Hugo

We welcome contributions to Hugo of any kind including documentation, themes,

@@ -50,15 +48,15 @@ Hugo has become a fully featured static site generator, so any new functionality
* strive not to break existing sites.
* close or update an open [Hugo issue](https://github.com/gohugoio/hugo/issues)

If it is of some complexity, the contributor is expected to maintain and support the new feature in the future (answer questions on the forum, fix any bugs etc.).
If it is of some complexity, the contributor is expected to maintain and support the new future (answer questions on the forum, fix any bugs etc.).

Any non-trivial code change needs to update an open [issue](https://github.com/gohugoio/hugo/issues). A non-trivial code change without an issue reference with one of the labels `bug` or `enhancement` will not be merged.
It is recommended to open up a discussion on the [Hugo Forum](https://discourse.gohugo.io/) to get feedback on your idea before you begin. If you are submitting a complex feature, create a small design proposal on the [Hugo issue tracker](https://github.com/gohugoio/hugo/issues) before you start.

Note that we do not accept new features that require [CGO](https://github.com/golang/go/wiki/cgo).
We have one exception to this rule which is LibSASS.

**Bug fixes are, of course, always welcome.**

## Submitting Patches

The Hugo project welcomes all contributors and contributions regardless of skill or experience level. If you are interested in helping with the project, we will help you with your contribution.

@@ -81,23 +79,19 @@ To make the contribution process as seamless as possible, we ask for the following

### Git Commit Message Guidelines

This [blog article](https://cbea.ms/git-commit/) is a good resource for learning how to write good commit messages,
This [blog article](http://chris.beams.io/posts/git-commit/) is a good resource for learning how to write good commit messages,
the most important part being that each commit message should have a title/subject in imperative mood starting with a capital letter and no trailing period:
*"js: Return error when option x is not set"*, **NOT** *"returning some error."*

Most title/subjects should have a lower-cased prefix with a colon and one whitespace. The prefix can be:

* The name of the package where (most of) the changes are made (e.g. `media: Add text/calendar`)
* If the package name is deeply nested/long, try to shorten it from the left side, e.g. `markup/goldmark` is OK, `resources/resource_transformers/js` can be shortened to `js`.
* If this commit touches several packages with a common functional topic, use that as a prefix, e.g. `errors: Resolve correct line numbers`)
* If this commit touches many packages without a common functional topic, prefix with `all:` (e.g. `all: Reformat Go code`)
* If this is a documentation update, prefix with `docs:`.
* If nothing of the above applies, just leave the prefix out.
* Note that the above excludes nouns seen in other repositories, e.g. "chore:".
*"Return error on wrong use of the Paginator"*, **NOT** *"returning some error."*

Also, if your commit references one or more GitHub issues, always end your commit message body with *See #1234* or *Fixes #1234*.
Replace *1234* with the GitHub issue ID. The last example will close the issue when the commit is merged into *master*.

Sometimes it makes sense to prefix the commit message with the package name (or docs folder) all lowercased ending with a colon.
That is fine, but the rest of the rules above apply.
So it is "tpl: Add emojify template func", not "tpl: add emojify template func.", and "docs: Document emoji", not "doc: document emoji."

Please use a short and descriptive branch name, e.g. **NOT** "patch-1". It's very common but creates a naming conflict each time when a submission is pulled for a review.

An example:

```text

@@ -123,10 +117,12 @@ cd hugo
go install
```

>Note: Some Go tools may not be fully updated to support Go Modules yet. One example would be LiteIDE. Follow [this workaround](https://github.com/visualfc/liteide/issues/986#issuecomment-428117702) for how to continue to work with Hugo below `GOPATH`.

For some convenient build and test targets, you also will want to install Mage:

```bash
go install github.com/magefile/mage
go get github.com/magefile/mage
```

Now, to make a change to Hugo's source:

@@ -148,7 +144,7 @@ Now, to make a change to Hugo's source:
1. Add your fork as a new remote (the remote name, "fork" in this example, is arbitrary):

```bash
git remote add fork git@github.com:USERNAME/hugo.git
git remote add fork git://github.com/USERNAME/hugo.git
```

1. Push the changes to your new remote:

102 Dockerfile
@@ -2,98 +2,44 @@
# Twitter: https://twitter.com/gohugoio
# Website: https://gohugo.io/

ARG GO_VERSION="1.24"
ARG ALPINE_VERSION="3.22"
ARG DART_SASS_VERSION="1.79.3"
FROM golang:1.13-alpine AS build

FROM --platform=$BUILDPLATFORM tonistiigi/xx:1.5.0 AS xx
FROM --platform=$BUILDPLATFORM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS gobuild
FROM golang:${GO_VERSION}-alpine${ALPINE_VERSION} AS gorun
# Optionally set HUGO_BUILD_TAGS to "extended" when building like so:
# docker build --build-arg HUGO_BUILD_TAGS=extended .
ARG HUGO_BUILD_TAGS

FROM gobuild AS build

RUN apk add clang lld

# Set up cross-compilation helpers
COPY --from=xx / /

ARG TARGETPLATFORM
RUN xx-apk add musl-dev gcc g++

# Optionally set HUGO_BUILD_TAGS to "none" or "withdeploy" when building like so:
# docker build --build-arg HUGO_BUILD_TAGS=withdeploy .
#
# We build the extended version by default.
ARG HUGO_BUILD_TAGS="extended"
ENV CGO_ENABLED=1
ENV GOPROXY=https://proxy.golang.org
ENV GOCACHE=/root/.cache/go-build
ENV GOMODCACHE=/go/pkg/mod
ARG TARGETPLATFORM
ARG CGO=1
ENV CGO_ENABLED=${CGO}
ENV GOOS=linux
ENV GO111MODULE=on

WORKDIR /go/src/github.com/gohugoio/hugo

# For --mount=type=cache the value of target is the default cache id, so
# for the go mod cache it would be good if we could share it with other Go images using the same setup,
# but the go build cache needs to be per platform.
# See this comment: https://github.com/moby/buildkit/issues/1706#issuecomment-702238282
RUN --mount=target=. \
--mount=type=cache,target=/go/pkg/mod \
--mount=type=cache,target=/root/.cache/go-build,id=go-build-$TARGETPLATFORM <<EOT
set -ex
xx-go build -tags "$HUGO_BUILD_TAGS" -ldflags "-s -w -X github.com/gohugoio/hugo/common/hugo.vendorInfo=docker" -o /usr/bin/hugo
xx-verify /usr/bin/hugo
EOT
COPY . /go/src/github.com/gohugoio/hugo/

# dart-sass downloads the dart-sass runtime dependency
FROM alpine:${ALPINE_VERSION} AS dart-sass
ARG TARGETARCH
ARG DART_SASS_VERSION
ARG DART_ARCH=${TARGETARCH/amd64/x64}
WORKDIR /out
ADD https://github.com/sass/dart-sass/releases/download/${DART_SASS_VERSION}/dart-sass-${DART_SASS_VERSION}-linux-${DART_ARCH}.tar.gz .
RUN tar -xf dart-sass-${DART_SASS_VERSION}-linux-${DART_ARCH}.tar.gz
# gcc/g++ are required to build SASS libraries for extended version
RUN apk update && \
apk add --no-cache gcc g++ musl-dev && \
go get github.com/magefile/mage

FROM gorun AS final
RUN mage hugo && mage install

COPY --from=build /usr/bin/hugo /usr/bin/hugo
# ---

# libc6-compat are required for extended libraries (libsass, libwebp).
RUN apk add --no-cache \
libc6-compat \
git \
runuser \
nodejs \
npm
FROM alpine:3.11

RUN mkdir -p /var/hugo/bin /cache && \
addgroup -Sg 1000 hugo && \
adduser -Sg hugo -u 1000 -h /var/hugo hugo && \
chown -R hugo: /var/hugo /cache && \
# For the Hugo's Git integration to work.
runuser -u hugo -- git config --global --add safe.directory /project && \
# See https://github.com/gohugoio/hugo/issues/9810
runuser -u hugo -- git config --global core.quotepath false
COPY --from=build /go/bin/hugo /usr/bin/hugo

USER hugo:hugo
VOLUME /project
WORKDIR /project
ENV HUGO_CACHEDIR=/cache
ENV PATH="/var/hugo/bin:$PATH"
# libc6-compat & libstdc++ are required for extended SASS libraries
# ca-certificates are required to fetch outside resources (like Twitter oEmbeds)
RUN apk update && \
apk add --no-cache ca-certificates libc6-compat libstdc++

COPY scripts/docker/entrypoint.sh /entrypoint.sh
COPY --from=dart-sass /out/dart-sass /var/hugo/bin/dart-sass

# Update PATH to reflect the new dependencies.
# For more complex setups, we should probably find a way to
# delegate this to the script itself, but this will have to do for now.
# Also, the dart-sass binary is a little special, other binaries can be put/linked
# directly in /var/hugo/bin.
ENV PATH="/var/hugo/bin/dart-sass:$PATH"
VOLUME /site
WORKDIR /site

# Expose port for live server
EXPOSE 1313

ENTRYPOINT ["/entrypoint.sh"]
ENTRYPOINT ["hugo"]
CMD ["--help"]
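For context, one plausible way to exercise the image built from the new Dockerfile is sketched below; the local tag and the `server` arguments are illustrative assumptions, while the `/project` mount, port 1313, and the `HUGO_BUILD_TAGS` build argument come from the Dockerfile itself:

```bash
# Hypothetical build-and-run of the new image (tag and server flags are assumptions,
# not taken from this diff; /project and port 1313 match the Dockerfile).
docker build -t hugo-local --build-arg HUGO_BUILD_TAGS=extended .
docker run --rm -it -v "$(pwd):/project" -p 1313:1313 hugo-local server --bind 0.0.0.0
```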

393 README.md
@@ -1,282 +1,187 @@
[bep]: https://github.com/bep
[bugs]: https://github.com/gohugoio/hugo/issues?q=is%3Aopen+is%3Aissue+label%3ABug
[contributing]: CONTRIBUTING.md
[create a proposal]: https://github.com/gohugoio/hugo/issues/new?labels=Proposal%2C+NeedsTriage&template=feature_request.md
[documentation repository]: https://github.com/gohugoio/hugoDocs
[documentation]: https://gohugo.io/documentation
[dragonfly bsd, freebsd, netbsd, and openbsd]: https://gohugo.io/installation/bsd
[features]: https://gohugo.io/about/features/
[forum]: https://discourse.gohugo.io
[friends]: https://github.com/gohugoio/hugo/graphs/contributors
[go]: https://go.dev/
[hugo modules]: https://gohugo.io/hugo-modules/
[installation]: https://gohugo.io/installation
[issue queue]: https://github.com/gohugoio/hugo/issues
[linux]: https://gohugo.io/installation/linux
[macos]: https://gohugo.io/installation/macos
[prebuilt binary]: https://github.com/gohugoio/hugo/releases/latest
[requesting help]: https://discourse.gohugo.io/t/requesting-help/9132
[spf13]: https://github.com/spf13
[static site generator]: https://en.wikipedia.org/wiki/Static_site_generator
[support]: https://discourse.gohugo.io
[themes]: https://themes.gohugo.io/
[website]: https://gohugo.io
[windows]: https://gohugo.io/installation/windows
<img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/static/images/hugo-logo-wide.svg?sanitize=true" alt="Hugo" width="565">

<a href="https://gohugo.io/"><img src="https://raw.githubusercontent.com/gohugoio/gohugoioTheme/master/static/images/hugo-logo-wide.svg?sanitize=true" alt="Hugo" width="565"></a>
A Fast and Flexible Static Site Generator built with love by [bep](https://github.com/bep), [spf13](http://spf13.com/) and [friends](https://github.com/gohugoio/hugo/graphs/contributors) in [Go][].

A fast and flexible static site generator built with love by [bep], [spf13], and [friends] in [Go].

---
[Website](https://gohugo.io) |
[Forum](https://discourse.gohugo.io) |
[Documentation](https://gohugo.io/getting-started/) |
[Installation Guide](https://gohugo.io/getting-started/installing/) |
[Contribution Guide](CONTRIBUTING.md) |
[Twitter](https://twitter.com/gohugoio)

[](https://godoc.org/github.com/gohugoio/hugo)
[](https://github.com/gohugoio/hugo/actions?query=workflow%3ATest)
[](https://travis-ci.org/gohugoio/hugo)
[](https://goreportcard.com/report/github.com/gohugoio/hugo)

[Website] | [Installation] | [Documentation] | [Support] | [Contributing] | <a rel="me" href="https://fosstodon.org/@gohugoio">Mastodon</a>

## Overview

Hugo is a [static site generator] written in [Go], optimized for speed and designed for flexibility. With its advanced templating system and fast asset pipelines, Hugo renders a complete site in seconds, often less.
Hugo is a static HTML and CSS website generator written in [Go][].
It is optimized for speed, ease of use, and configurability.
Hugo takes a directory with content and templates and renders them into a full HTML website.

Due to its flexible framework, multilingual support, and powerful taxonomy system, Hugo is widely used to create:
Hugo relies on Markdown files with front matter for metadata, and you can run Hugo from any directory.
This works well for shared hosts and other systems where you don’t have a privileged account.

- Corporate, government, nonprofit, education, news, event, and project sites
- Documentation sites
- Image portfolios
- Landing pages
- Business, professional, and personal blogs
- Resumes and CVs
Hugo renders a typical website of moderate size in a fraction of a second.
A good rule of thumb is that each piece of content renders in around 1 millisecond.

Use Hugo's embedded web server during development to instantly see changes to content, structure, behavior, and presentation. Then deploy the site to your host, or push changes to your Git provider for automated builds and deployment.
Hugo is designed to work well for any kind of website including blogs, tumbles, and docs.

Hugo's fast asset pipelines include:
#### Supported Architectures

- Image processing – Convert, resize, crop, rotate, adjust colors, apply filters, overlay text and images, and extract EXIF data
- JavaScript bundling – Transpile TypeScript and JSX to JavaScript, bundle, tree shake, minify, create source maps, and perform SRI hashing.
- Sass processing – Transpile Sass to CSS, bundle, tree shake, minify, create source maps, perform SRI hashing, and integrate with PostCSS
- Tailwind CSS processing – Compile Tailwind CSS utility classes into standard CSS, bundle, tree shake, optimize, minify, perform SRI hashing, and integrate with PostCSS
Currently, we provide pre-built Hugo binaries for Windows, Linux, FreeBSD, NetBSD, DragonFly BSD, Open BSD, macOS (Darwin), and [Android](https://gist.github.com/bep/a0d8a26cf6b4f8bc992729b8e50b480b) for x64, i386 and ARM architectures.

And with [Hugo Modules], you can share content, assets, data, translations, themes, templates, and configuration with other projects via public or private Git repositories.
Hugo may also be compiled from source wherever the Go compiler tool chain can run, e.g. for other operating systems including Plan 9 and Solaris.

See the [features] section of the documentation for a comprehensive summary of Hugo's capabilities.
**Complete documentation is available at [Hugo Documentation](https://gohugo.io/getting-started/).**

## Sponsors
## Choose How to Install

<p>&nbsp;</p>
<p float="left">
<a href="https://www.linode.com/?utm_campaign=hugosponsor&utm_medium=banner&utm_source=hugogithub" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/linode-logo_standard_light_medium.png" width="200" alt="Linode"></a>

<a href="https://www.jetbrains.com/go/?utm_source=OSS&utm_medium=referral&utm_campaign=hugo" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/goland.svg" width="200" alt="The complete IDE crafted for professional Go developers."></a>

<a href="https://pinme.eth.limo/?s=hugo" target="_blank"><img src="https://raw.githubusercontent.com/gohugoio/hugoDocs/master/assets/images/sponsors/logo-pinme.svg" width="200" alt="PinMe."></a>
</p>
If you want to use Hugo as your site generator, simply install the Hugo binaries.
The Hugo binaries have no external dependencies.

## Editions
To contribute to the Hugo source code or documentation, you should [fork the Hugo GitHub project](https://github.com/gohugoio/hugo#fork-destination-box) and clone it to your local machine.

Hugo is available in three editions: standard, extended, and extended/deploy. While the standard edition provides core functionality, the extended and extended/deploy editions offer advanced features.
Finally, you can install the Hugo source code with `go`, build the binaries yourself, and run Hugo that way.
Building the binaries is an easy task for an experienced `go` getter.

Feature|extended edition|extended/deploy edition
:--|:-:|:-:
Encode to the WebP format when [processing images]. You can decode WebP images with any edition.|:heavy_check_mark:|:heavy_check_mark:
[Transpile Sass to CSS] using the embedded LibSass transpiler. You can use the [Dart Sass] transpiler with any edition.|:heavy_check_mark:|:heavy_check_mark:
Deploy your site directly to a Google Cloud Storage bucket, an AWS S3 bucket, or an Azure Storage container. See [details].|:x:|:heavy_check_mark:
### Install Hugo as Your Site Generator (Binary Install)

[dart sass]: https://gohugo.io/functions/css/sass/#dart-sass
[processing images]: https://gohugo.io/content-management/image-processing/
[transpile sass to css]: https://gohugo.io/functions/css/sass/
[details]: https://gohugo.io/hosting-and-deployment/hugo-deploy/
Use the [installation instructions in the Hugo documentation](https://gohugo.io/getting-started/installing/).

Unless your specific deployment needs require the extended/deploy edition, we recommend the extended edition.
### Build and Install the Binaries from Source (Advanced Install)

## Installation
#### Prerequisite Tools

Install Hugo from a [prebuilt binary], package manager, or package repository. Please see the installation instructions for your operating system:
* [Git](https://git-scm.com/)
* [Go (at least Go 1.11)](https://golang.org/dl/)

- [macOS]
- [Linux]
- [Windows]
- [DragonFly BSD, FreeBSD, NetBSD, and OpenBSD]
#### Fetch from GitHub

## Build from source
Since Hugo 0.48, Hugo uses the Go Modules support built into Go 1.11 to build. The easiest is to clone Hugo in a directory outside of `GOPATH`, as in the following example:

Prerequisites to build Hugo from source:

- Standard edition: Go 1.23.0 or later
- Extended edition: Go 1.23.0 or later, and GCC
- Extended/deploy edition: Go 1.23.0 or later, and GCC

Build the standard edition:

```text
go install github.com/gohugoio/hugo@latest
```bash
mkdir $HOME/src
cd $HOME/src
git clone https://github.com/gohugoio/hugo.git
cd hugo
go install
```

Build the extended edition:
**If you are a Windows user, substitute the `$HOME` environment variable above with `%USERPROFILE%`.**

```text
CGO_ENABLED=1 go install -tags extended github.com/gohugoio/hugo@latest
## The Hugo Documentation

The Hugo documentation now lives in its own repository, see https://github.com/gohugoio/hugoDocs. But we do keep a version of that documentation as a `git subtree` in this repository. To build the sub folder `/docs` as a Hugo site, you need to clone this repo:

```bash
git clone git@github.com:gohugoio/hugo.git
```

Build the extended/deploy edition:

```text
CGO_ENABLED=1 go install -tags extended,withdeploy github.com/gohugoio/hugo@latest
```

## Star History

[](https://star-history.com/#gohugoio/hugo&Timeline)

## Documentation

Hugo's [documentation] includes installation instructions, a quick start guide, conceptual explanations, reference information, and examples.

Please submit documentation issues and pull requests to the [documentation repository].

## Support

Please **do not use the issue queue** for questions or troubleshooting. Unless you are certain that your issue is a software defect, use the [forum].

Hugo’s [forum] is an active community of users and developers who answer questions, share knowledge, and provide examples. A quick search of over 20,000 topics will often answer your question. Please be sure to read about [requesting help] before asking your first question.

## Contributing

You can contribute to the Hugo project by:

- Answering questions on the [forum]
- Improving the [documentation]
- Monitoring the [issue queue]
- Creating or improving [themes]
- Squashing [bugs]

Please submit documentation issues and pull requests to the [documentation repository].

If you have an idea for an enhancement or new feature, create a new topic on the [forum] in the "Feature" category. This will help you to:

- Determine if the capability already exists
- Measure interest
- Refine the concept

If there is sufficient interest, [create a proposal]. Do not submit a pull request until the project lead accepts the proposal.
## Contributing to Hugo

For a complete guide to contributing to Hugo, see the [Contribution Guide](CONTRIBUTING.md).

We welcome contributions to Hugo of any kind including documentation, themes,
organization, tutorials, blog posts, bug reports, issues, feature requests,
feature implementations, pull requests, answering questions on the forum,
helping to manage issues, etc.

The Hugo community and maintainers are [very active](https://github.com/gohugoio/hugo/pulse/monthly) and helpful, and the project benefits greatly from this activity.

### Asking Support Questions

We have an active [discussion forum](https://discourse.gohugo.io) where users and developers can ask questions.
Please don't use the GitHub issue tracker to ask questions.

### Reporting Issues

If you believe you have found a defect in Hugo or its documentation, use
the GitHub issue tracker to report the problem to the Hugo maintainers.
If you're not sure if it's a bug or not, start by asking in the [discussion forum](https://discourse.gohugo.io).
When reporting the issue, please provide the version of Hugo in use (`hugo version`).

### Submitting Patches

The Hugo project welcomes all contributors and contributions regardless of skill or experience level.
If you are interested in helping with the project, we will help you with your contribution.
Hugo is a very active project with many contributions happening daily.

We want to create the best possible product for our users and the best contribution experience for our developers,
we have a set of guidelines which ensure that all contributions are acceptable.
The guidelines are not intended as a filter or barrier to participation.
If you are unfamiliar with the contribution process, the Hugo team will help you and teach you how to bring your contribution in accordance with the guidelines.

For a complete guide to contributing code to Hugo, see the [Contribution Guide](CONTRIBUTING.md).

[](https://github.com/igrigorik/ga-beacon)

[Go]: https://golang.org/
[Hugo Documentation]: https://gohugo.io/overview/introduction/

## Dependencies

Hugo stands on the shoulders of great open source libraries. Run `hugo env --logLevel info` to display a list of dependencies.
Hugo stands on the shoulders of many great open source libraries, in lexical order:
| Dependency | License |
| :------------- | :------------- |
| [github.com/BurntSushi/locker](https://github.com/BurntSushi/locker) | The Unlicense |
| [github.com/BurntSushi/toml](https://github.com/BurntSushi/toml) | MIT License |
| [github.com/PuerkitoBio/purell](https://github.com/PuerkitoBio/purell) | BSD 3-Clause "New" or "Revised" License |
| [github.com/PuerkitoBio/urlesc](https://github.com/PuerkitoBio/urlesc) | BSD 3-Clause "New" or "Revised" License |
| [github.com/alecthomas/chroma](https://github.com/alecthomas/chroma) | MIT License |
| [github.com/bep/debounce](https://github.com/bep/debounce) | MIT License |
| [github.com/bep/gitmap](https://github.com/bep/gitmap) | MIT License |
| [github.com/bep/go-tocss](https://github.com/bep/go-tocss) | MIT License |
| [github.com/niklasfasching/go-org](https://github.com/niklasfasching/go-org) | MIT License |
| [github.com/cpuguy83/go-md2man](https://github.com/cpuguy83/go-md2man) | MIT License |
| [github.com/danwakefield/fnmatch](https://github.com/danwakefield/fnmatch) | BSD 2-Clause "Simplified" License |
| [github.com/disintegration/imaging](https://github.com/disintegration/imaging) | MIT License |
| [github.com/dlclark/regexp2](https://github.com/dlclark/regexp2) | MIT License |
| [github.com/eknkc/amber](https://github.com/eknkc/amber) | MIT License |
| [github.com/fsnotify/fsnotify](https://github.com/fsnotify/fsnotify) | BSD 3-Clause "New" or "Revised" License |
| [github.com/gobwas/glob](https://github.com/gobwas/glob) | MIT License |
| [github.com/gorilla/websocket](https://github.com/gorilla/websocket) | BSD 2-Clause "Simplified" License |
| [github.com/hashicorp/go-immutable-radix](https://github.com/hashicorp/go-immutable-radix) | Mozilla Public License 2.0 |
| [github.com/hashicorp/golang-lru](https://github.com/hashicorp/golang-lru) | Mozilla Public License 2.0 |
| [github.com/hashicorp/hcl](https://github.com/hashicorp/hcl) | Mozilla Public License 2.0 |
| [github.com/jdkato/prose](https://github.com/jdkato/prose) | MIT License |
| [github.com/kyokomi/emoji](https://github.com/kyokomi/emoji) | MIT License |
| [github.com/magiconair/properties](https://github.com/magiconair/properties) | BSD 2-Clause "Simplified" License |
| [github.com/markbates/inflect](https://github.com/markbates/inflect) | MIT License |
| [github.com/mattn/go-isatty](https://github.com/mattn/go-isatty) | MIT License |
| [github.com/mattn/go-runewidth](https://github.com/mattn/go-runewidth) | MIT License |
| [github.com/miekg/mmark](https://github.com/miekg/mmark) | Simplified BSD License |
| [github.com/mitchellh/hashstructure](https://github.com/mitchellh/hashstructure) | MIT License |
| [github.com/mitchellh/mapstructure](https://github.com/mitchellh/mapstructure) | MIT License |
| [github.com/muesli/smartcrop](https://github.com/muesli/smartcrop) | MIT License |
| [github.com/nicksnyder/go-i18n](https://github.com/nicksnyder/go-i18n) | MIT License |
| [github.com/olekukonko/tablewriter](https://github.com/olekukonko/tablewriter) | MIT License |
| [github.com/pelletier/go-toml](https://github.com/pelletier/go-toml) | MIT License |
| [github.com/pkg/errors](https://github.com/pkg/errors) | BSD 2-Clause "Simplified" License |
| [github.com/russross/blackfriday](https://github.com/russross/blackfriday) | Simplified BSD License |
| [github.com/shurcooL/sanitized_anchor_name](https://github.com/shurcooL/sanitized_anchor_name) | MIT License |
| [github.com/spf13/afero](https://github.com/spf13/afero) | Apache License 2.0 |
| [github.com/spf13/cast](https://github.com/spf13/cast) | MIT License |
| [github.com/spf13/cobra](https://github.com/spf13/cobra) | Apache License 2.0 |
| [github.com/spf13/fsync](https://github.com/spf13/fsync) | MIT License |
| [github.com/spf13/jwalterweatherman](https://github.com/spf13/jwalterweatherman) | MIT License |
| [github.com/spf13/nitro](https://github.com/spf13/nitro) | Apache License 2.0 |
| [github.com/spf13/pflag](https://github.com/spf13/pflag) | BSD 3-Clause "New" or "Revised" License |
| [github.com/spf13/viper](https://github.com/spf13/viper) | MIT License |
| [github.com/tdewolff/minify](https://github.com/tdewolff/minify) | MIT License |
| [github.com/tdewolff/parse](https://github.com/tdewolff/parse) | MIT License |
| [github.com/wellington/go-libsass](https://github.com/wellington/go-libsass) | Apache License 2.0 |
| [github.com/yosssi/ace](https://github.com/yosssi/ace) | MIT License |
| [golang.org/x/image](https://golang.org/x/image) | BSD 3-Clause "New" or "Revised" License |
| [golang.org/x/net](https://golang.org/x/net) | BSD 3-Clause "New" or "Revised" License |
| [golang.org/x/sync](https://golang.org/x/sync) | BSD 3-Clause "New" or "Revised" License |
| [golang.org/x/sys](https://golang.org/x/sys) | BSD 3-Clause "New" or "Revised" License |
| [golang.org/x/text](https://golang.org/x/text) | BSD 3-Clause "New" or "Revised" License |
| [gopkg.in/yaml.v2](https://gopkg.in/yaml.v2) | Apache License 2.0 |

<details>
<summary>See current dependencies</summary>

```text
github.com/BurntSushi/locker="v0.0.0-20171006230638-a6e239ea1c69"
github.com/PuerkitoBio/goquery="v1.10.1"
github.com/alecthomas/chroma/v2="v2.15.0"
github.com/andybalholm/cascadia="v1.3.3"
github.com/armon/go-radix="v1.0.1-0.20221118154546-54df44f2176c"
github.com/bep/clocks="v0.5.0"
github.com/bep/debounce="v1.2.0"
github.com/bep/gitmap="v1.6.0"
github.com/bep/goat="v0.5.0"
github.com/bep/godartsass/v2="v2.3.2"
github.com/bep/golibsass="v1.2.0"
github.com/bep/gowebp="v0.3.0"
github.com/bep/imagemeta="v0.8.4"
github.com/bep/lazycache="v0.7.0"
github.com/bep/logg="v0.4.0"
github.com/bep/mclib="v1.20400.20402"
github.com/bep/overlayfs="v0.9.2"
github.com/bep/simplecobra="v0.5.0"
github.com/bep/tmc="v0.5.1"
github.com/cespare/xxhash/v2="v2.3.0"
github.com/clbanning/mxj/v2="v2.7.0"
github.com/cpuguy83/go-md2man/v2="v2.0.4"
github.com/disintegration/gift="v1.2.1"
github.com/dlclark/regexp2="v1.11.5"
github.com/dop251/goja="v0.0.0-20250125213203-5ef83b82af17"
github.com/evanw/esbuild="v0.24.2"
github.com/fatih/color="v1.18.0"
github.com/frankban/quicktest="v1.14.6"
github.com/fsnotify/fsnotify="v1.8.0"
github.com/getkin/kin-openapi="v0.129.0"
github.com/ghodss/yaml="v1.0.0"
github.com/go-openapi/jsonpointer="v0.21.0"
github.com/go-openapi/swag="v0.23.0"
github.com/go-sourcemap/sourcemap="v2.1.4+incompatible"
github.com/gobuffalo/flect="v1.0.3"
github.com/gobwas/glob="v0.2.3"
github.com/gohugoio/go-i18n/v2="v2.1.3-0.20230805085216-e63c13218d0e"
github.com/gohugoio/hashstructure="v0.5.0"
github.com/gohugoio/httpcache="v0.7.0"
github.com/gohugoio/hugo-goldmark-extensions/extras="v0.2.0"
github.com/gohugoio/hugo-goldmark-extensions/passthrough="v0.3.0"
github.com/gohugoio/locales="v0.14.0"
github.com/gohugoio/localescompressed="v1.0.1"
github.com/golang/freetype="v0.0.0-20170609003504-e2365dfdc4a0"
github.com/google/go-cmp="v0.6.0"
github.com/google/pprof="v0.0.0-20250208200701-d0013a598941"
github.com/gorilla/websocket="v1.5.3"
github.com/hairyhenderson/go-codeowners="v0.7.0"
github.com/hashicorp/golang-lru/v2="v2.0.7"
github.com/jdkato/prose="v1.2.1"
github.com/josharian/intern="v1.0.0"
github.com/kr/pretty="v0.3.1"
github.com/kr/text="v0.2.0"
github.com/kyokomi/emoji/v2="v2.2.13"
github.com/lucasb-eyer/go-colorful="v1.2.0"
github.com/mailru/easyjson="v0.7.7"
github.com/makeworld-the-better-one/dither/v2="v2.4.0"
github.com/marekm4/color-extractor="v1.2.1"
github.com/mattn/go-colorable="v0.1.13"
github.com/mattn/go-isatty="v0.0.20"
github.com/mattn/go-runewidth="v0.0.9"
github.com/mazznoer/csscolorparser="v0.1.5"
github.com/mitchellh/mapstructure="v1.5.1-0.20231216201459-8508981c8b6c"
github.com/mohae/deepcopy="v0.0.0-20170929034955-c48cc78d4826"
github.com/muesli/smartcrop="v0.3.0"
github.com/niklasfasching/go-org="v1.7.0"
github.com/oasdiff/yaml3="v0.0.0-20241210130736-a94c01f36349"
github.com/oasdiff/yaml="v0.0.0-20241210131133-6b86fb107d80"
github.com/olekukonko/tablewriter="v0.0.5"
github.com/pbnjay/memory="v0.0.0-20210728143218-7b4eea64cf58"
github.com/pelletier/go-toml/v2="v2.2.3"
github.com/perimeterx/marshmallow="v1.1.5"
github.com/pkg/browser="v0.0.0-20240102092130-5ac0b6a4141c"
github.com/pkg/errors="v0.9.1"
github.com/rivo/uniseg="v0.4.7"
github.com/rogpeppe/go-internal="v1.13.1"
github.com/russross/blackfriday/v2="v2.1.0"
github.com/sass/libsass="3.6.6"
github.com/spf13/afero="v1.11.0"
github.com/spf13/cast="v1.7.1"
github.com/spf13/cobra="v1.8.1"
github.com/spf13/fsync="v0.10.1"
github.com/spf13/pflag="v1.0.6"
github.com/tdewolff/minify/v2="v2.20.37"
github.com/tdewolff/parse/v2="v2.7.15"
github.com/tetratelabs/wazero="v1.8.2"
github.com/webmproject/libwebp="v1.3.2"
github.com/yuin/goldmark-emoji="v1.0.4"
github.com/yuin/goldmark="v1.7.8"
go.uber.org/automaxprocs="v1.5.3"
golang.org/x/crypto="v0.33.0"
golang.org/x/exp="v0.0.0-20250210185358-939b2ce775ac"
golang.org/x/image="v0.24.0"
golang.org/x/mod="v0.23.0"
golang.org/x/net="v0.35.0"
golang.org/x/sync="v0.11.0"
golang.org/x/sys="v0.30.0"
golang.org/x/text="v0.22.0"
golang.org/x/tools="v0.30.0"
golang.org/x/xerrors="v0.0.0-20240903120638-7835f813f4da"
gonum.org/v1/plot="v0.15.0"
google.golang.org/protobuf="v1.36.5"
gopkg.in/yaml.v2="v2.4.0"
gopkg.in/yaml.v3="v3.0.1"
oss.terrastruct.com/d2="v0.6.9"
oss.terrastruct.com/util-go="v0.0.0-20241005222610-44c011a04896"
rsc.io/qr="v0.2.0"
software.sslmate.com/src/go-pkcs12="v0.2.0"
```
</details>

@@ -1,7 +0,0 @@
## Security Policy

### Reporting a Vulnerability

Please report (suspected) security vulnerabilities to **[bjorn.erik.pedersen@gmail.com](mailto:bjorn.erik.pedersen@gmail.com)**. You will receive a response from us within 48 hours. If we can confirm the issue, we will release a patch as soon as possible depending on the complexity of the issue but historically within days.

Also see [Hugo's Security Model](https://gohugo.io/about/security/).
37
bench.sh
Executable file
37
bench.sh
Executable file
|
@@ -0,0 +1,37 @@
#!/usr/bin/env bash

# Allow the user to override the go executable by running as GOEXE=xxx make ...
GOEXE="${GOEXE-go}"

# Convenience script to:
# - run the benchmark tests for a given package on a given branch,
# - do the same for master,
# - then compare the two runs with benchcmp.

benchFilter=".*"

if (( $# < 2 )); then
    echo "USAGE: ./bench.sh <git-branch> <package-to-bench> (and <benchmark filter> (regexp, optional))"
    exit 1
fi

if [ $# -eq 3 ]; then
    benchFilter=$3
fi

BRANCH=$1
PACKAGE=$2

git checkout $BRANCH
"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-$BRANCH.txt

git checkout master
"${GOEXE}" test -test.run=NONE -bench="$benchFilter" -test.benchmem=true ./$PACKAGE > /tmp/bench-$PACKAGE-master.txt

benchcmp /tmp/bench-$PACKAGE-master.txt /tmp/bench-$PACKAGE-$BRANCH.txt
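A usage example (the branch name below is only illustrative, and `benchcmp` must be available on your `PATH`): `./bench.sh my-feature-branch hugolib "BenchmarkSiteNew"` benchmarks `./hugolib` on `my-feature-branch` and on `master`, then prints the comparison of the two runs.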
12 benchSite.sh Executable file
@@ -0,0 +1,12 @@
#!/bin/bash

# Allow the user to override the go executable by running as GOEXE=xxx make ...
GOEXE="${GOEXE-go}"

# Pass in a regexp matching the benchmarks you want to run, e.g. './benchSite.sh "YAML"'.
# Note the quotes, which are needed for more complex expressions.
# The above will run all variations, but only for front matter YAML.

echo "Running with BenchmarkSiteBuilding/${1}"

"${GOEXE}" test -run="NONE" -bench="BenchmarkSiteBuilding/${1}" -test.benchmem=true ./hugolib -memprofile mem.prof -count 3 -cpuprofile cpu.prof
1 benchbep.sh Executable file
@@ -0,0 +1 @@
gobench -package=./hugolib -bench="BenchmarkSiteNew/Deep_content_tree"
1 bepdock.sh Executable file
@@ -0,0 +1 @@
docker run --rm --mount type=bind,source="$(pwd)",target=/hugo -w /hugo -i -t bepsays/ci-goreleaser:1.11-2 /bin/bash
@@ -20,7 +20,7 @@ import (
)

var bufferPool = &sync.Pool{
	New: func() any {
	New: func() interface{} {
		return &bytes.Buffer{}
	},
}
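The only substantive change in this hunk is the switch between `interface{}` and its alias `any` (the alias was introduced in Go 1.18); behavior is identical. As a minimal, self-contained sketch of how a pool like this is typically used (the surrounding file isn't shown here, so the usage below is illustrative):

```go
package main

import (
	"bytes"
	"fmt"
	"sync"
)

// Same shape as the pool in the hunk above: New hands out fresh buffers.
var bufferPool = &sync.Pool{
	New: func() any {
		return &bytes.Buffer{}
	},
}

func main() {
	buf := bufferPool.Get().(*bytes.Buffer)
	buf.Reset() // a recycled buffer may still hold old bytes
	buf.WriteString("hello")
	fmt.Println(buf.String())
	bufferPool.Put(buf) // return it for reuse
}
```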
2 cache/docs.go vendored
@@ -1,2 +0,0 @@
// Package cache contains the different cache implementations.
package cache
647 cache/dynacache/dynacache.go vendored
@@ -1,647 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package dynacache
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"math"
|
||||
"path"
|
||||
"regexp"
|
||||
"runtime"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/bep/lazycache"
|
||||
"github.com/bep/logg"
|
||||
"github.com/gohugoio/hugo/common/collections"
|
||||
"github.com/gohugoio/hugo/common/herrors"
|
||||
"github.com/gohugoio/hugo/common/loggers"
|
||||
"github.com/gohugoio/hugo/common/paths"
|
||||
"github.com/gohugoio/hugo/common/rungroup"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/identity"
|
||||
"github.com/gohugoio/hugo/resources/resource"
|
||||
)
|
||||
|
||||
const minMaxSize = 10
|
||||
|
||||
type KeyIdentity struct {
|
||||
Key any
|
||||
Identity identity.Identity
|
||||
}
|
||||
|
||||
// New creates a new cache.
|
||||
func New(opts Options) *Cache {
|
||||
if opts.CheckInterval == 0 {
|
||||
opts.CheckInterval = time.Second * 2
|
||||
}
|
||||
|
||||
if opts.MaxSize == 0 {
|
||||
opts.MaxSize = 100000
|
||||
}
|
||||
if opts.Log == nil {
|
||||
panic("nil Log")
|
||||
}
|
||||
|
||||
if opts.MinMaxSize == 0 {
|
||||
opts.MinMaxSize = 30
|
||||
}
|
||||
|
||||
stats := &stats{
|
||||
opts: opts,
|
||||
adjustmentFactor: 1.0,
|
||||
currentMaxSize: opts.MaxSize,
|
||||
availableMemory: config.GetMemoryLimit(),
|
||||
}
|
||||
|
||||
infol := opts.Log.InfoCommand("dynacache")
|
||||
|
||||
evictedIdentities := collections.NewStack[KeyIdentity]()
|
||||
|
||||
onEvict := func(k, v any) {
|
||||
if !opts.Watching {
|
||||
return
|
||||
}
|
||||
identity.WalkIdentitiesShallow(v, func(level int, id identity.Identity) bool {
|
||||
evictedIdentities.Push(KeyIdentity{Key: k, Identity: id})
|
||||
return false
|
||||
})
|
||||
resource.MarkStale(v)
|
||||
}
|
||||
|
||||
c := &Cache{
|
||||
partitions: make(map[string]PartitionManager),
|
||||
onEvict: onEvict,
|
||||
evictedIdentities: evictedIdentities,
|
||||
opts: opts,
|
||||
stats: stats,
|
||||
infol: infol,
|
||||
}
|
||||
|
||||
c.stop = c.start()
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
// Options for the cache.
|
||||
type Options struct {
|
||||
Log loggers.Logger
|
||||
CheckInterval time.Duration
|
||||
MaxSize int
|
||||
MinMaxSize int
|
||||
Watching bool
|
||||
}
|
||||
|
||||
// Options for a partition.
|
||||
type OptionsPartition struct {
|
||||
// When to clear this partition.
|
||||
ClearWhen ClearWhen
|
||||
|
||||
// Weight is a number between 1 and 100 that indicates, in general, how big this partition may get.
|
||||
Weight int
|
||||
}
|
||||
|
||||
func (o OptionsPartition) WeightFraction() float64 {
|
||||
return float64(o.Weight) / 100
|
||||
}
|
||||
|
||||
func (o OptionsPartition) CalculateMaxSize(maxSizePerPartition int) int {
|
||||
return int(math.Floor(float64(maxSizePerPartition) * o.WeightFraction()))
|
||||
}
|
||||
|
||||
// A dynamic partitioned cache.
|
||||
type Cache struct {
|
||||
mu sync.RWMutex
|
||||
|
||||
partitions map[string]PartitionManager
|
||||
|
||||
onEvict func(k, v any)
|
||||
evictedIdentities *collections.Stack[KeyIdentity]
|
||||
|
||||
opts Options
|
||||
infol logg.LevelLogger
|
||||
|
||||
stats *stats
|
||||
stopOnce sync.Once
|
||||
stop func()
|
||||
}
|
||||
|
||||
// DrainEvictedIdentities drains the evicted identities from the cache.
|
||||
func (c *Cache) DrainEvictedIdentities() []KeyIdentity {
|
||||
return c.evictedIdentities.Drain()
|
||||
}
|
||||
|
||||
// DrainEvictedIdentitiesMatching drains the evicted identities from the cache that match the given predicate.
|
||||
func (c *Cache) DrainEvictedIdentitiesMatching(predicate func(KeyIdentity) bool) []KeyIdentity {
|
||||
return c.evictedIdentities.DrainMatching(predicate)
|
||||
}
|
||||
|
||||
// ClearMatching clears all partitions for which the predicate returns true.
|
||||
func (c *Cache) ClearMatching(predicatePartition func(k string, p PartitionManager) bool, predicateValue func(k, v any) bool) {
|
||||
if predicatePartition == nil {
|
||||
predicatePartition = func(k string, p PartitionManager) bool { return true }
|
||||
}
|
||||
if predicateValue == nil {
|
||||
panic("nil predicateValue")
|
||||
}
|
||||
g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
|
||||
NumWorkers: len(c.partitions),
|
||||
Handle: func(ctx context.Context, partition PartitionManager) error {
|
||||
partition.clearMatching(predicateValue)
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
for k, p := range c.partitions {
|
||||
if !predicatePartition(k, p) {
|
||||
continue
|
||||
}
|
||||
g.Enqueue(p)
|
||||
}
|
||||
|
||||
g.Wait()
|
||||
}
|
||||
|
||||
// ClearOnRebuild prepares the cache for a new rebuild taking the given changeset into account.
|
||||
// predicate is optional and will clear any entry for which it returns true.
|
||||
func (c *Cache) ClearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity) {
|
||||
g := rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
|
||||
NumWorkers: len(c.partitions),
|
||||
Handle: func(ctx context.Context, partition PartitionManager) error {
|
||||
partition.clearOnRebuild(predicate, changeset...)
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
for _, p := range c.partitions {
|
||||
g.Enqueue(p)
|
||||
}
|
||||
|
||||
g.Wait()
|
||||
|
||||
// Clear any entries marked as stale above.
|
||||
g = rungroup.Run[PartitionManager](context.Background(), rungroup.Config[PartitionManager]{
|
||||
NumWorkers: len(c.partitions),
|
||||
Handle: func(ctx context.Context, partition PartitionManager) error {
|
||||
partition.clearStale()
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
for _, p := range c.partitions {
|
||||
g.Enqueue(p)
|
||||
}
|
||||
|
||||
g.Wait()
|
||||
}
|
||||
|
||||
type keysProvider interface {
|
||||
Keys() []string
|
||||
}
|
||||
|
||||
// Keys returns a list of keys in all partitions.
|
||||
func (c *Cache) Keys(predicate func(s string) bool) []string {
|
||||
if predicate == nil {
|
||||
predicate = func(s string) bool { return true }
|
||||
}
|
||||
var keys []string
|
||||
for pn, g := range c.partitions {
|
||||
pkeys := g.(keysProvider).Keys()
|
||||
for _, k := range pkeys {
|
||||
p := path.Join(pn, k)
|
||||
if predicate(p) {
|
||||
keys = append(keys, p)
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return keys
|
||||
}
|
||||
|
||||
func calculateMaxSizePerPartition(maxItemsTotal, totalWeightQuantity, numPartitions int) int {
|
||||
if numPartitions == 0 {
|
||||
panic("numPartitions must be > 0")
|
||||
}
|
||||
if totalWeightQuantity == 0 {
|
||||
panic("totalWeightQuantity must be > 0")
|
||||
}
|
||||
|
||||
avgWeight := float64(totalWeightQuantity) / float64(numPartitions)
|
||||
return int(math.Floor(float64(maxItemsTotal) / float64(numPartitions) * (100.0 / avgWeight)))
|
||||
}
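// Worked example (matching the unit test further down): maxItemsTotal=1000, totalWeightQuantity=500,
// numPartitions=5 gives avgWeight=100, so each partition gets floor(1000/5 * (100/100)) = 200.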
|
||||
|
||||
// Stop stops the cache.
|
||||
func (c *Cache) Stop() {
|
||||
c.stopOnce.Do(func() {
|
||||
c.stop()
|
||||
})
|
||||
}
|
||||
|
||||
func (c *Cache) adjustCurrentMaxSize() {
|
||||
c.mu.RLock()
|
||||
defer c.mu.RUnlock()
|
||||
|
||||
if len(c.partitions) == 0 {
|
||||
return
|
||||
}
|
||||
var m runtime.MemStats
|
||||
runtime.ReadMemStats(&m)
|
||||
s := c.stats
|
||||
s.memstatsCurrent = m
|
||||
// fmt.Printf("\n\nAvailable = %v\nAlloc = %v\nTotalAlloc = %v\nSys = %v\nNumGC = %v\nMaxSize = %d\nAdjustmentFactor=%f\n\n", helpers.FormatByteCount(s.availableMemory), helpers.FormatByteCount(m.Alloc), helpers.FormatByteCount(m.TotalAlloc), helpers.FormatByteCount(m.Sys), m.NumGC, c.stats.currentMaxSize, s.adjustmentFactor)
|
||||
|
||||
if s.availableMemory >= s.memstatsCurrent.Alloc {
|
||||
if s.adjustmentFactor <= 1.0 {
|
||||
s.adjustmentFactor += 0.2
|
||||
}
|
||||
} else {
|
||||
// We're low on memory.
|
||||
s.adjustmentFactor -= 0.4
|
||||
}
|
||||
|
||||
if s.adjustmentFactor <= 0 {
|
||||
s.adjustmentFactor = 0.05
|
||||
}
|
||||
|
||||
if !s.adjustCurrentMaxSize() {
|
||||
return
|
||||
}
|
||||
|
||||
totalWeight := 0
|
||||
for _, pm := range c.partitions {
|
||||
totalWeight += pm.getOptions().Weight
|
||||
}
|
||||
|
||||
maxSizePerPartition := calculateMaxSizePerPartition(c.stats.currentMaxSize, totalWeight, len(c.partitions))
|
||||
|
||||
evicted := 0
|
||||
for _, p := range c.partitions {
|
||||
evicted += p.adjustMaxSize(p.getOptions().CalculateMaxSize(maxSizePerPartition))
|
||||
}
|
||||
|
||||
if evicted > 0 {
|
||||
c.infol.
|
||||
WithFields(
|
||||
logg.Fields{
|
||||
{Name: "evicted", Value: evicted},
|
||||
{Name: "numGC", Value: m.NumGC},
|
||||
{Name: "limit", Value: helpers.FormatByteCount(c.stats.availableMemory)},
|
||||
{Name: "alloc", Value: helpers.FormatByteCount(m.Alloc)},
|
||||
{Name: "totalAlloc", Value: helpers.FormatByteCount(m.TotalAlloc)},
|
||||
},
|
||||
).Logf("adjusted partitions' max size")
|
||||
}
|
||||
}
|
||||
|
||||
func (c *Cache) start() func() {
|
||||
ticker := time.NewTicker(c.opts.CheckInterval)
|
||||
quit := make(chan struct{})
|
||||
|
||||
go func() {
|
||||
for {
|
||||
select {
|
||||
case <-ticker.C:
|
||||
c.adjustCurrentMaxSize()
|
||||
// Reset the ticker to avoid drift.
|
||||
ticker.Reset(c.opts.CheckInterval)
|
||||
case <-quit:
|
||||
ticker.Stop()
|
||||
return
|
||||
}
|
||||
}
|
||||
}()
|
||||
|
||||
return func() {
|
||||
close(quit)
|
||||
}
|
||||
}
|
||||
|
||||
var partitionNameRe = regexp.MustCompile(`^\/[a-zA-Z0-9]{4}(\/[a-zA-Z0-9]+)?(\/[a-zA-Z0-9]+)?`)
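// For illustration, names accepted by this pattern include "/aaaa", "/aaaa/bbbb" and "/aaaa/bbbb/cccc":
// a leading slash, a first segment of exactly four alphanumerics, then up to two more segments.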
|
||||
|
||||
// GetOrCreatePartition gets or creates a partition with the given name.
|
||||
func GetOrCreatePartition[K comparable, V any](c *Cache, name string, opts OptionsPartition) *Partition[K, V] {
|
||||
if c == nil {
|
||||
panic("nil Cache")
|
||||
}
|
||||
if opts.Weight < 1 || opts.Weight > 100 {
|
||||
panic("invalid Weight, must be between 1 and 100")
|
||||
}
|
||||
|
||||
if partitionNameRe.FindString(name) != name {
|
||||
panic(fmt.Sprintf("invalid partition name %q", name))
|
||||
}
|
||||
|
||||
c.mu.RLock()
|
||||
p, found := c.partitions[name]
|
||||
c.mu.RUnlock()
|
||||
if found {
|
||||
return p.(*Partition[K, V])
|
||||
}
|
||||
|
||||
c.mu.Lock()
|
||||
defer c.mu.Unlock()
|
||||
|
||||
// Double check.
|
||||
p, found = c.partitions[name]
|
||||
if found {
|
||||
return p.(*Partition[K, V])
|
||||
}
|
||||
|
||||
// At this point, we don't know the number of partitions or their configuration, but
|
||||
// this will be re-adjusted later.
|
||||
const numberOfPartitionsEstimate = 10
|
||||
maxSize := opts.CalculateMaxSize(c.opts.MaxSize / numberOfPartitionsEstimate)
|
||||
|
||||
onEvict := func(k K, v V) {
|
||||
c.onEvict(k, v)
|
||||
}
|
||||
|
||||
// Create a new partition and cache it.
|
||||
partition := &Partition[K, V]{
|
||||
c: lazycache.New(lazycache.Options[K, V]{MaxEntries: maxSize, OnEvict: onEvict}),
|
||||
maxSize: maxSize,
|
||||
trace: c.opts.Log.Logger().WithLevel(logg.LevelTrace).WithField("partition", name),
|
||||
opts: opts,
|
||||
}
|
||||
|
||||
c.partitions[name] = partition
|
||||
|
||||
return partition
|
||||
}
|
||||
|
||||
// Partition is a partition in the cache.
|
||||
type Partition[K comparable, V any] struct {
|
||||
c *lazycache.Cache[K, V]
|
||||
|
||||
zero V
|
||||
|
||||
trace logg.LevelLogger
|
||||
opts OptionsPartition
|
||||
|
||||
maxSize int
|
||||
}
|
||||
|
||||
// GetOrCreate gets or creates a value for the given key.
|
||||
func (p *Partition[K, V]) GetOrCreate(key K, create func(key K) (V, error)) (V, error) {
|
||||
v, err := p.doGetOrCreate(key, create)
|
||||
if err != nil {
|
||||
return p.zero, err
|
||||
}
|
||||
if resource.StaleVersion(v) > 0 {
|
||||
p.c.Delete(key)
|
||||
return p.doGetOrCreate(key, create)
|
||||
}
|
||||
return v, err
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) doGetOrCreate(key K, create func(key K) (V, error)) (V, error) {
|
||||
v, _, err := p.c.GetOrCreate(key, create)
|
||||
return v, err
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) GetOrCreateWitTimeout(key K, duration time.Duration, create func(key K) (V, error)) (V, error) {
|
||||
v, err := p.doGetOrCreateWitTimeout(key, duration, create)
|
||||
if err != nil {
|
||||
return p.zero, err
|
||||
}
|
||||
if resource.StaleVersion(v) > 0 {
|
||||
p.c.Delete(key)
|
||||
return p.doGetOrCreateWitTimeout(key, duration, create)
|
||||
}
|
||||
return v, err
|
||||
}
|
||||
|
||||
// GetOrCreateWitTimeout gets or creates a value for the given key and times out if the create function
|
||||
// takes too long.
|
||||
func (p *Partition[K, V]) doGetOrCreateWitTimeout(key K, duration time.Duration, create func(key K) (V, error)) (V, error) {
|
||||
resultch := make(chan V, 1)
|
||||
errch := make(chan error, 1)
|
||||
|
||||
go func() {
|
||||
var (
|
||||
v V
|
||||
err error
|
||||
)
|
||||
defer func() {
|
||||
if r := recover(); r != nil {
|
||||
if rerr, ok := r.(error); ok {
|
||||
err = rerr
|
||||
} else {
|
||||
err = fmt.Errorf("panic: %v", r)
|
||||
}
|
||||
}
|
||||
if err != nil {
|
||||
errch <- err
|
||||
} else {
|
||||
resultch <- v
|
||||
}
|
||||
}()
|
||||
v, _, err = p.c.GetOrCreate(key, create)
|
||||
}()
|
||||
|
||||
select {
|
||||
case v := <-resultch:
|
||||
return v, nil
|
||||
case err := <-errch:
|
||||
return p.zero, err
|
||||
case <-time.After(duration):
|
||||
return p.zero, &herrors.TimeoutError{
|
||||
Duration: duration,
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) clearMatching(predicate func(k, v any) bool) {
|
||||
p.c.DeleteFunc(func(key K, v V) bool {
|
||||
if predicate(key, v) {
|
||||
p.trace.Log(
|
||||
logg.StringFunc(
|
||||
func() string {
|
||||
return fmt.Sprintf("clearing cache key %v", key)
|
||||
},
|
||||
),
|
||||
)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) clearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity) {
|
||||
if predicate == nil {
|
||||
predicate = func(k, v any) bool {
|
||||
return false
|
||||
}
|
||||
}
|
||||
opts := p.getOptions()
|
||||
if opts.ClearWhen == ClearNever {
|
||||
return
|
||||
}
|
||||
|
||||
if opts.ClearWhen == ClearOnRebuild {
|
||||
// Clear all.
|
||||
p.Clear()
|
||||
return
|
||||
}
|
||||
|
||||
depsFinder := identity.NewFinder(identity.FinderConfig{})
|
||||
|
||||
shouldDelete := func(key K, v V) bool {
|
||||
// We always clear elements marked as stale.
|
||||
if resource.StaleVersion(v) > 0 {
|
||||
return true
|
||||
}
|
||||
|
||||
// Now check if this entry has changed based on the changeset
|
||||
// based on filesystem events.
|
||||
if len(changeset) == 0 {
|
||||
// Nothing changed.
|
||||
return false
|
||||
}
|
||||
|
||||
var probablyDependent bool
|
||||
identity.WalkIdentitiesShallow(v, func(level int, id2 identity.Identity) bool {
|
||||
for _, id := range changeset {
|
||||
if r := depsFinder.Contains(id, id2, -1); r > 0 {
|
||||
// It's probably dependent, evict from cache.
|
||||
probablyDependent = true
|
||||
return true
|
||||
}
|
||||
}
|
||||
return false
|
||||
})
|
||||
|
||||
return probablyDependent
|
||||
}
|
||||
|
||||
// First pass.
|
||||
// Second pass needs to be done in a separate loop to catch any
|
||||
// elements marked as stale in the other partitions.
|
||||
p.c.DeleteFunc(func(key K, v V) bool {
|
||||
if predicate(key, v) || shouldDelete(key, v) {
|
||||
p.trace.Log(
|
||||
logg.StringFunc(
|
||||
func() string {
|
||||
return fmt.Sprintf("first pass: clearing cache key %v", key)
|
||||
},
|
||||
),
|
||||
)
|
||||
return true
|
||||
}
|
||||
return false
|
||||
})
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) Keys() []K {
|
||||
var keys []K
|
||||
p.c.DeleteFunc(func(key K, v V) bool {
|
||||
keys = append(keys, key)
|
||||
return false
|
||||
})
|
||||
return keys
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) clearStale() {
|
||||
p.c.DeleteFunc(func(key K, v V) bool {
|
||||
staleVersion := resource.StaleVersion(v)
|
||||
if staleVersion > 0 {
|
||||
p.trace.Log(
|
||||
logg.StringFunc(
|
||||
func() string {
|
||||
return fmt.Sprintf("second pass: clearing cache key %v", key)
|
||||
},
|
||||
),
|
||||
)
|
||||
}
|
||||
|
||||
return staleVersion > 0
|
||||
})
|
||||
}
|
||||
|
||||
// adjustMaxSize adjusts the max size of the partition and returns the number of items evicted.
|
||||
func (p *Partition[K, V]) adjustMaxSize(newMaxSize int) int {
|
||||
if newMaxSize < minMaxSize {
|
||||
newMaxSize = minMaxSize
|
||||
}
|
||||
oldMaxSize := p.maxSize
|
||||
if newMaxSize == oldMaxSize {
|
||||
return 0
|
||||
}
|
||||
p.maxSize = newMaxSize
|
||||
// fmt.Println("Adjusting max size of partition from", oldMaxSize, "to", newMaxSize)
|
||||
return p.c.Resize(newMaxSize)
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) getMaxSize() int {
|
||||
return p.maxSize
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) getOptions() OptionsPartition {
|
||||
return p.opts
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) Clear() {
|
||||
p.c.DeleteFunc(func(key K, v V) bool {
|
||||
return true
|
||||
})
|
||||
}
|
||||
|
||||
func (p *Partition[K, V]) Get(ctx context.Context, key K) (V, bool) {
|
||||
return p.c.Get(key)
|
||||
}
|
||||
|
||||
type PartitionManager interface {
|
||||
adjustMaxSize(addend int) int
|
||||
getMaxSize() int
|
||||
getOptions() OptionsPartition
|
||||
clearOnRebuild(predicate func(k, v any) bool, changeset ...identity.Identity)
|
||||
clearMatching(predicate func(k, v any) bool)
|
||||
clearStale()
|
||||
}
|
||||
|
||||
const (
|
||||
ClearOnRebuild ClearWhen = iota + 1
|
||||
ClearOnChange
|
||||
ClearNever
|
||||
)
|
||||
|
||||
type ClearWhen int
|
||||
|
||||
type stats struct {
|
||||
opts Options
|
||||
memstatsCurrent runtime.MemStats
|
||||
currentMaxSize int
|
||||
availableMemory uint64
|
||||
|
||||
adjustmentFactor float64
|
||||
}
|
||||
|
||||
func (s *stats) adjustCurrentMaxSize() bool {
|
||||
newCurrentMaxSize := int(math.Floor(float64(s.opts.MaxSize) * s.adjustmentFactor))
|
||||
|
||||
if newCurrentMaxSize < s.opts.MinMaxSize {
|
||||
newCurrentMaxSize = int(s.opts.MinMaxSize)
|
||||
}
|
||||
changed := newCurrentMaxSize != s.currentMaxSize
|
||||
s.currentMaxSize = newCurrentMaxSize
|
||||
return changed
|
||||
}
|
||||
|
||||
// CleanKey turns s into a format suitable for a cache key for this package.
|
||||
// The key will be a Unix-styled path with a leading slash but no trailing slash.
|
||||
func CleanKey(s string) string {
|
||||
return path.Clean(paths.ToSlashPreserveLeading(s))
|
||||
}
|
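Taken together, the API above boils down to: create one `Cache`, carve it into named partitions, and let each partition lazily compute and memoize values. A minimal sketch, assuming the module path `github.com/gohugoio/hugo` used elsewhere in these files (the partition name and values are purely illustrative):

```go
package main

import (
	"fmt"

	"github.com/gohugoio/hugo/cache/dynacache"
	"github.com/gohugoio/hugo/common/loggers"
)

func main() {
	// New panics on a nil Log, so a logger is required.
	cache := dynacache.New(dynacache.Options{Log: loggers.NewDefault()})
	defer cache.Stop()

	// Partition names must match the /xxxx[/yyy[/zzz]] pattern checked by GetOrCreatePartition.
	p := dynacache.GetOrCreatePartition[string, int](
		cache, "/demo/ints",
		dynacache.OptionsPartition{Weight: 30, ClearWhen: dynacache.ClearOnRebuild},
	)

	// The create func runs only on a cache miss; later lookups reuse the stored value.
	v, err := p.GetOrCreate("answer", func(key string) (int, error) {
		return 42, nil
	})
	fmt.Println(v, err)
}
```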
230 cache/dynacache/dynacache_test.go vendored
@@ -1,230 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package dynacache
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
"github.com/gohugoio/hugo/common/loggers"
|
||||
"github.com/gohugoio/hugo/identity"
|
||||
"github.com/gohugoio/hugo/resources/resource"
|
||||
)
|
||||
|
||||
var (
|
||||
_ resource.StaleInfo = (*testItem)(nil)
|
||||
_ identity.Identity = (*testItem)(nil)
|
||||
)
|
||||
|
||||
type testItem struct {
|
||||
name string
|
||||
staleVersion uint32
|
||||
}
|
||||
|
||||
func (t testItem) StaleVersion() uint32 {
|
||||
return t.staleVersion
|
||||
}
|
||||
|
||||
func (t testItem) IdentifierBase() string {
|
||||
return t.name
|
||||
}
|
||||
|
||||
func TestCache(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
cache := New(Options{
|
||||
Log: loggers.NewDefault(),
|
||||
})
|
||||
|
||||
c.Cleanup(func() {
|
||||
cache.Stop()
|
||||
})
|
||||
|
||||
opts := OptionsPartition{Weight: 30}
|
||||
|
||||
c.Assert(cache, qt.Not(qt.IsNil))
|
||||
|
||||
p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts)
|
||||
c.Assert(p1, qt.Not(qt.IsNil))
|
||||
|
||||
p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", opts)
|
||||
|
||||
c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "foo bar", opts) }, qt.PanicMatches, ".*invalid partition name.*")
|
||||
c.Assert(func() { GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 1234}) }, qt.PanicMatches, ".*invalid Weight.*")
|
||||
|
||||
c.Assert(p2, qt.Equals, p1)
|
||||
|
||||
p3 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", opts)
|
||||
c.Assert(p3, qt.Not(qt.IsNil))
|
||||
c.Assert(p3, qt.Not(qt.Equals), p1)
|
||||
|
||||
c.Assert(func() { New(Options{}) }, qt.PanicMatches, ".*nil Log.*")
|
||||
}
|
||||
|
||||
func TestCalculateMaxSizePerPartition(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
c.Assert(calculateMaxSizePerPartition(1000, 500, 5), qt.Equals, 200)
|
||||
c.Assert(calculateMaxSizePerPartition(1000, 250, 5), qt.Equals, 400)
|
||||
c.Assert(func() { calculateMaxSizePerPartition(1000, 250, 0) }, qt.PanicMatches, ".*must be > 0.*")
|
||||
c.Assert(func() { calculateMaxSizePerPartition(1000, 0, 1) }, qt.PanicMatches, ".*must be > 0.*")
|
||||
}
|
||||
|
||||
func TestCleanKey(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
c.Assert(CleanKey("a/b/c"), qt.Equals, "/a/b/c")
|
||||
c.Assert(CleanKey("/a/b/c"), qt.Equals, "/a/b/c")
|
||||
c.Assert(CleanKey("a/b/c/"), qt.Equals, "/a/b/c")
|
||||
c.Assert(CleanKey(filepath.FromSlash("/a/b/c/")), qt.Equals, "/a/b/c")
|
||||
}
|
||||
|
||||
func newTestCache(t *testing.T) *Cache {
|
||||
cache := New(
|
||||
Options{
|
||||
Log: loggers.NewDefault(),
|
||||
},
|
||||
)
|
||||
|
||||
p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", OptionsPartition{Weight: 30, ClearWhen: ClearOnRebuild})
|
||||
p2 := GetOrCreatePartition[string, testItem](cache, "/aaaa/cccc", OptionsPartition{Weight: 30, ClearWhen: ClearOnChange})
|
||||
|
||||
p1.GetOrCreate("clearOnRebuild", func(string) (testItem, error) {
|
||||
return testItem{}, nil
|
||||
})
|
||||
|
||||
p2.GetOrCreate("clearBecauseStale", func(string) (testItem, error) {
|
||||
return testItem{
|
||||
staleVersion: 32,
|
||||
}, nil
|
||||
})
|
||||
|
||||
p2.GetOrCreate("clearBecauseIdentityChanged", func(string) (testItem, error) {
|
||||
return testItem{
|
||||
name: "changed",
|
||||
}, nil
|
||||
})
|
||||
|
||||
p2.GetOrCreate("clearNever", func(string) (testItem, error) {
|
||||
return testItem{
|
||||
staleVersion: 0,
|
||||
}, nil
|
||||
})
|
||||
|
||||
t.Cleanup(func() {
|
||||
cache.Stop()
|
||||
})
|
||||
|
||||
return cache
|
||||
}
|
||||
|
||||
func TestClear(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
predicateAll := func(string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
cache := newTestCache(t)
|
||||
|
||||
c.Assert(cache.Keys(predicateAll), qt.HasLen, 4)
|
||||
|
||||
cache.ClearOnRebuild(nil)
|
||||
|
||||
// Stale items are always cleared.
|
||||
c.Assert(cache.Keys(predicateAll), qt.HasLen, 2)
|
||||
|
||||
cache = newTestCache(t)
|
||||
cache.ClearOnRebuild(nil, identity.StringIdentity("changed"))
|
||||
|
||||
c.Assert(cache.Keys(nil), qt.HasLen, 1)
|
||||
|
||||
cache = newTestCache(t)
|
||||
|
||||
cache.ClearMatching(nil, func(k, v any) bool {
|
||||
return k.(string) == "clearOnRebuild"
|
||||
})
|
||||
|
||||
c.Assert(cache.Keys(predicateAll), qt.HasLen, 3)
|
||||
|
||||
cache.adjustCurrentMaxSize()
|
||||
}
|
||||
|
||||
func TestPanicInCreate(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
cache := newTestCache(t)
|
||||
|
||||
p1 := GetOrCreatePartition[string, testItem](cache, "/aaaa/bbbb", OptionsPartition{Weight: 30, ClearWhen: ClearOnRebuild})
|
||||
|
||||
willPanic := func(i int) func() {
|
||||
return func() {
|
||||
p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) {
|
||||
panic(errors.New(key))
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
// GetOrCreateWitTimeout needs to recover from panics in the create func.
|
||||
willErr := func(i int) error {
|
||||
_, err := p1.GetOrCreateWitTimeout(fmt.Sprintf("error-%d", i), 10*time.Second, func(key string) (testItem, error) {
|
||||
return testItem{}, errors.New(key)
|
||||
})
|
||||
return err
|
||||
}
|
||||
|
||||
for i := range 3 {
|
||||
for range 3 {
|
||||
c.Assert(willPanic(i), qt.PanicMatches, fmt.Sprintf("panic-%d", i))
|
||||
c.Assert(willErr(i), qt.ErrorMatches, fmt.Sprintf("error-%d", i))
|
||||
}
|
||||
}
|
||||
|
||||
// Test the same keys again without the panic.
|
||||
for i := range 3 {
|
||||
for range 3 {
|
||||
v, err := p1.GetOrCreate(fmt.Sprintf("panic-%d", i), func(key string) (testItem, error) {
|
||||
return testItem{
|
||||
name: key,
|
||||
}, nil
|
||||
})
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v.name, qt.Equals, fmt.Sprintf("panic-%d", i))
|
||||
|
||||
v, err = p1.GetOrCreateWitTimeout(fmt.Sprintf("error-%d", i), 10*time.Second, func(key string) (testItem, error) {
|
||||
return testItem{
|
||||
name: key,
|
||||
}, nil
|
||||
})
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v.name, qt.Equals, fmt.Sprintf("error-%d", i))
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestAdjustCurrentMaxSize(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
cache := newTestCache(t)
|
||||
alloc := cache.stats.memstatsCurrent.Alloc
|
||||
cache.adjustCurrentMaxSize()
|
||||
c.Assert(cache.stats.memstatsCurrent.Alloc, qt.Not(qt.Equals), alloc)
|
||||
}
|
222 cache/filecache/filecache.go vendored
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@ -17,15 +17,14 @@ import (
|
|||
"bytes"
|
||||
"errors"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/httpcache"
|
||||
"github.com/gohugoio/hugo/common/hugio"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
|
@ -37,7 +36,7 @@ import (
|
|||
var ErrFatal = errors.New("fatal filecache error")
|
||||
|
||||
const (
|
||||
FilecacheRootDirname = "filecache"
|
||||
filecacheRootDirname = "filecache"
|
||||
)
|
||||
|
||||
// Cache caches a set of files in a directory. This is usually a file on
|
||||
|
@ -53,9 +52,6 @@ type Cache struct {
|
|||
pruneAllRootDir string
|
||||
|
||||
nlocker *lockTracker
|
||||
|
||||
initOnce sync.Once
|
||||
initErr error
|
||||
}
|
||||
|
||||
type lockTracker struct {
|
||||
|
@ -108,23 +104,9 @@ func (l *lockedFile) Close() error {
|
|||
return l.File.Close()
|
||||
}
|
||||
|
||||
func (c *Cache) init() error {
|
||||
c.initOnce.Do(func() {
|
||||
// Create the base dir if it does not exist.
|
||||
if err := c.Fs.MkdirAll("", 0o777); err != nil && !os.IsExist(err) {
|
||||
c.initErr = err
|
||||
}
|
||||
})
|
||||
return c.initErr
|
||||
}
|
||||
|
||||
// WriteCloser returns a transactional writer into the cache.
|
||||
// It's important that it's closed when done.
|
||||
func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, nil, err
|
||||
}
|
||||
|
||||
id = cleanID(id)
|
||||
c.nlocker.Lock(id)
|
||||
|
||||
|
@ -148,12 +130,7 @@ func (c *Cache) WriteCloser(id string) (ItemInfo, io.WriteCloser, error) {
|
|||
// it when done.
|
||||
func (c *Cache) ReadOrCreate(id string,
|
||||
read func(info ItemInfo, r io.ReadSeeker) error,
|
||||
create func(info ItemInfo, w io.WriteCloser) error,
|
||||
) (info ItemInfo, err error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, err
|
||||
}
|
||||
|
||||
create func(info ItemInfo, w io.WriteCloser) error) (info ItemInfo, err error) {
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
|
@ -181,24 +158,13 @@ func (c *Cache) ReadOrCreate(id string,
|
|||
err = create(info, f)
|
||||
|
||||
return
|
||||
}
|
||||
|
||||
// NamedLock locks the given id. The lock is released when the returned function is called.
|
||||
func (c *Cache) NamedLock(id string) func() {
|
||||
id = cleanID(id)
|
||||
c.nlocker.Lock(id)
|
||||
return func() {
|
||||
c.nlocker.Unlock(id)
|
||||
}
|
||||
}
|
||||
|
||||
// GetOrCreate tries to get the file with the given id from cache. If not found or expired, create will
|
||||
// be invoked and the result cached.
|
||||
// This method is protected by a named lock using the given id as identifier.
|
||||
func (c *Cache) GetOrCreate(id string, create func() (io.ReadCloser, error)) (ItemInfo, io.ReadCloser, error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, nil, err
|
||||
}
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
|
@ -210,12 +176,7 @@ func (c *Cache) GetOrCreate(id string, create func() (io.ReadCloser, error)) (It
|
|||
return info, r, nil
|
||||
}
|
||||
|
||||
var (
|
||||
r io.ReadCloser
|
||||
err error
|
||||
)
|
||||
|
||||
r, err = create()
|
||||
r, err := create()
|
||||
if err != nil {
|
||||
return info, nil, err
|
||||
}
|
||||
|
@ -228,30 +189,11 @@ func (c *Cache) GetOrCreate(id string, create func() (io.ReadCloser, error)) (It
|
|||
var buff bytes.Buffer
|
||||
return info,
|
||||
hugio.ToReadCloser(&buff),
|
||||
c.writeReader(id, io.TeeReader(r, &buff))
|
||||
}
|
||||
|
||||
func (c *Cache) writeReader(id string, r io.Reader) error {
|
||||
dir := filepath.Dir(id)
|
||||
if dir != "" {
|
||||
_ = c.Fs.MkdirAll(dir, 0o777)
|
||||
}
|
||||
f, err := c.Fs.Create(id)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
_, _ = io.Copy(f, r)
|
||||
|
||||
return nil
|
||||
afero.WriteReader(c.Fs, id, io.TeeReader(r, &buff))
|
||||
}
|
||||
|
||||
// GetOrCreateBytes is the same as GetOrCreate, but produces a byte slice.
|
||||
func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (ItemInfo, []byte, error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, nil, err
|
||||
}
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
|
@ -261,16 +203,11 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
|
|||
|
||||
if r := c.getOrRemove(id); r != nil {
|
||||
defer r.Close()
|
||||
b, err := io.ReadAll(r)
|
||||
b, err := ioutil.ReadAll(r)
|
||||
return info, b, err
|
||||
}
|
||||
|
||||
var (
|
||||
b []byte
|
||||
err error
|
||||
)
|
||||
|
||||
b, err = create()
|
||||
b, err := create()
|
||||
if err != nil {
|
||||
return info, nil, err
|
||||
}
|
||||
|
@ -279,18 +216,15 @@ func (c *Cache) GetOrCreateBytes(id string, create func() ([]byte, error)) (Item
|
|||
return info, b, nil
|
||||
}
|
||||
|
||||
if err := c.writeReader(id, bytes.NewReader(b)); err != nil {
|
||||
if err := afero.WriteReader(c.Fs, id, bytes.NewReader(b)); err != nil {
|
||||
return info, nil, err
|
||||
}
|
||||
|
||||
return info, b, nil
|
||||
|
||||
}
|
||||
|
||||
// GetBytes gets the file content with the given id from the cache, nil if none found.
|
||||
// GetBytes gets the file content with the given id from the cahce, nil if none found.
|
||||
func (c *Cache) GetBytes(id string) (ItemInfo, []byte, error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, nil, err
|
||||
}
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
|
@ -300,18 +234,15 @@ func (c *Cache) GetBytes(id string) (ItemInfo, []byte, error) {
|
|||
|
||||
if r := c.getOrRemove(id); r != nil {
|
||||
defer r.Close()
|
||||
b, err := io.ReadAll(r)
|
||||
b, err := ioutil.ReadAll(r)
|
||||
return info, b, err
|
||||
}
|
||||
|
||||
return info, nil, nil
|
||||
}
|
||||
|
||||
// Get gets the file with the given id from the cache, nil if none found.
|
||||
// Get gets the file with the given id from the cahce, nil if none found.
|
||||
func (c *Cache) Get(id string) (ItemInfo, io.ReadCloser, error) {
|
||||
if err := c.init(); err != nil {
|
||||
return ItemInfo{}, nil, err
|
||||
}
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
|
@ -332,11 +263,20 @@ func (c *Cache) getOrRemove(id string) hugio.ReadSeekCloser {
|
|||
return nil
|
||||
}
|
||||
|
||||
if removed, err := c.removeIfExpired(id); err != nil || removed {
|
||||
if c.maxAge > 0 {
|
||||
fi, err := c.Fs.Stat(id)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
if c.isExpired(fi.ModTime()) {
|
||||
c.Fs.Remove(id)
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
f, err := c.Fs.Open(id)
|
||||
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
@ -344,74 +284,30 @@ func (c *Cache) getOrRemove(id string) hugio.ReadSeekCloser {
|
|||
return f
|
||||
}
|
||||
|
||||
func (c *Cache) getBytesAndRemoveIfExpired(id string) ([]byte, bool) {
|
||||
if c.maxAge == 0 {
|
||||
// No caching.
|
||||
return nil, false
|
||||
}
|
||||
|
||||
f, err := c.Fs.Open(id)
|
||||
if err != nil {
|
||||
return nil, false
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
removed, err := c.removeIfExpired(id)
|
||||
if err != nil {
|
||||
return nil, false
|
||||
}
|
||||
|
||||
return b, removed
|
||||
}
|
||||
|
||||
func (c *Cache) removeIfExpired(id string) (bool, error) {
|
||||
if c.maxAge <= 0 {
|
||||
return false, nil
|
||||
}
|
||||
|
||||
fi, err := c.Fs.Stat(id)
|
||||
if err != nil {
|
||||
return false, err
|
||||
}
|
||||
|
||||
if c.isExpired(fi.ModTime()) {
|
||||
c.Fs.Remove(id)
|
||||
return true, nil
|
||||
}
|
||||
|
||||
return false, nil
|
||||
}
|
||||
|
||||
func (c *Cache) isExpired(modTime time.Time) bool {
|
||||
if c.maxAge < 0 {
|
||||
return false
|
||||
}
|
||||
|
||||
// Note the use of time.Since here.
|
||||
// We cannot use Hugo's global Clock for this.
|
||||
return c.maxAge == 0 || time.Since(modTime) > c.maxAge
|
||||
}
|
||||
|
||||
// For testing
|
||||
func (c *Cache) GetString(id string) string {
|
||||
func (c *Cache) getString(id string) string {
|
||||
id = cleanID(id)
|
||||
|
||||
c.nlocker.Lock(id)
|
||||
defer c.nlocker.Unlock(id)
|
||||
|
||||
f, err := c.Fs.Open(id)
|
||||
|
||||
if err != nil {
|
||||
return ""
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
b, _ := io.ReadAll(f)
|
||||
b, _ := ioutil.ReadAll(f)
|
||||
return string(b)
|
||||
|
||||
}
|
||||
|
||||
// Caches is a named set of caches.
|
||||
|
@ -425,29 +321,47 @@ func (f Caches) Get(name string) *Cache {
|
|||
// NewCaches creates a new set of file caches from the given
|
||||
// configuration.
|
||||
func NewCaches(p *helpers.PathSpec) (Caches, error) {
|
||||
dcfg := p.Cfg.GetConfigSection("caches").(Configs)
|
||||
var dcfg Configs
|
||||
if c, ok := p.Cfg.Get("filecacheConfigs").(Configs); ok {
|
||||
dcfg = c
|
||||
} else {
|
||||
var err error
|
||||
dcfg, err = DecodeConfig(p.Fs.Source, p.Cfg)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
|
||||
fs := p.Fs.Source
|
||||
|
||||
m := make(Caches)
|
||||
for k, v := range dcfg {
|
||||
var cfs afero.Fs
|
||||
|
||||
if v.IsResourceDir {
|
||||
if v.isResourceDir {
|
||||
cfs = p.BaseFs.ResourcesCache
|
||||
} else {
|
||||
cfs = fs
|
||||
}
|
||||
|
||||
if cfs == nil {
|
||||
panic("nil fs")
|
||||
// TODO(bep) we still have some places that do not initialize the
|
||||
// full dependencies of a site, e.g. the import Jekyll command.
|
||||
// That command does not need these caches, so let us just continue
|
||||
// for now.
|
||||
continue
|
||||
}
|
||||
|
||||
baseDir := v.DirCompiled
|
||||
baseDir := v.Dir
|
||||
|
||||
bfs := hugofs.NewBasePathFs(cfs, baseDir)
|
||||
if err := cfs.MkdirAll(baseDir, 0777); err != nil && !os.IsExist(err) {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
bfs := afero.NewBasePathFs(cfs, baseDir)
|
||||
|
||||
var pruneAllRootDir string
|
||||
if k == CacheKeyModules {
|
||||
if k == cacheKeyModules {
|
||||
pruneAllRootDir = "pkg"
|
||||
}
|
||||
|
||||
|
@ -460,37 +374,3 @@ func NewCaches(p *helpers.PathSpec) (Caches, error) {
|
|||
func cleanID(name string) string {
|
||||
return strings.TrimPrefix(filepath.Clean(name), helpers.FilePathSeparator)
|
||||
}
|
||||
|
||||
// AsHTTPCache returns an httpcache.Cache implementation for this file cache.
|
||||
// Note that none of the methods are protected by named locks, so you need to make sure
|
||||
// to do that in your own code.
|
||||
func (c *Cache) AsHTTPCache() httpcache.Cache {
|
||||
return &httpCache{c: c}
|
||||
}
|
||||
|
||||
type httpCache struct {
|
||||
c *Cache
|
||||
}
|
||||
|
||||
func (h *httpCache) Get(id string) (resp []byte, ok bool) {
|
||||
id = cleanID(id)
|
||||
b, removed := h.c.getBytesAndRemoveIfExpired(id)
|
||||
|
||||
return b, !removed
|
||||
}
|
||||
|
||||
func (h *httpCache) Set(id string, resp []byte) {
|
||||
if h.c.maxAge == 0 {
|
||||
return
|
||||
}
|
||||
|
||||
id = cleanID(id)
|
||||
|
||||
if err := h.c.writeReader(id, bytes.NewReader(resp)); err != nil {
|
||||
panic(err)
|
||||
}
|
||||
}
|
||||
|
||||
func (h *httpCache) Delete(key string) {
|
||||
h.c.Fs.Remove(key)
|
||||
}
|
||||
|
|
131 cache/filecache/filecache_config.go vendored
@@ -11,131 +11,105 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package filecache provides a file based cache for Hugo.
|
||||
package filecache
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/common/maps"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
"github.com/mitchellh/mapstructure"
|
||||
"github.com/pkg/errors"
|
||||
"github.com/spf13/afero"
|
||||
)
|
||||
|
||||
const (
|
||||
cachesConfigKey = "caches"
|
||||
|
||||
resourcesGenDir = ":resourceDir/_gen"
|
||||
cacheDirProject = ":cacheDir/:project"
|
||||
)
|
||||
|
||||
var defaultCacheConfig = FileCacheConfig{
|
||||
var defaultCacheConfig = Config{
|
||||
MaxAge: -1, // Never expire
|
||||
Dir: cacheDirProject,
|
||||
Dir: ":cacheDir/:project",
|
||||
}
|
||||
|
||||
const (
|
||||
CacheKeyGetJSON = "getjson"
|
||||
CacheKeyGetCSV = "getcsv"
|
||||
CacheKeyImages = "images"
|
||||
CacheKeyAssets = "assets"
|
||||
CacheKeyModules = "modules"
|
||||
CacheKeyGetResource = "getresource"
|
||||
CacheKeyMisc = "misc"
|
||||
cacheKeyGetJSON = "getjson"
|
||||
cacheKeyGetCSV = "getcsv"
|
||||
cacheKeyImages = "images"
|
||||
cacheKeyAssets = "assets"
|
||||
cacheKeyModules = "modules"
|
||||
)
|
||||
|
||||
type Configs map[string]FileCacheConfig
|
||||
type Configs map[string]Config
|
||||
|
||||
// For internal use.
|
||||
func (c Configs) CacheDirModules() string {
|
||||
return c[CacheKeyModules].DirCompiled
|
||||
return c[cacheKeyModules].Dir
|
||||
}
|
||||
|
||||
var defaultCacheConfigs = Configs{
|
||||
CacheKeyModules: {
|
||||
cacheKeyModules: {
|
||||
MaxAge: -1,
|
||||
Dir: ":cacheDir/modules",
|
||||
},
|
||||
CacheKeyGetJSON: defaultCacheConfig,
|
||||
CacheKeyGetCSV: defaultCacheConfig,
|
||||
CacheKeyImages: {
|
||||
cacheKeyGetJSON: defaultCacheConfig,
|
||||
cacheKeyGetCSV: defaultCacheConfig,
|
||||
cacheKeyImages: {
|
||||
MaxAge: -1,
|
||||
Dir: resourcesGenDir,
|
||||
},
|
||||
CacheKeyAssets: {
|
||||
cacheKeyAssets: {
|
||||
MaxAge: -1,
|
||||
Dir: resourcesGenDir,
|
||||
},
|
||||
CacheKeyGetResource: {
|
||||
MaxAge: -1, // Never expire
|
||||
Dir: cacheDirProject,
|
||||
},
|
||||
CacheKeyMisc: {
|
||||
MaxAge: -1,
|
||||
Dir: cacheDirProject,
|
||||
},
|
||||
}
|
||||
|
||||
type FileCacheConfig struct {
|
||||
type Config struct {
|
||||
// Max age of cache entries in this cache. Any items older than this will
|
||||
// be removed and not returned from the cache.
|
||||
// A negative value means forever, 0 means cache is disabled.
|
||||
// Hugo is lenient with what types it accepts here, but we recommend using
|
||||
// a duration string, a sequence of decimal numbers, each with optional fraction and a unit suffix,
|
||||
// such as "300ms", "1.5h" or "2h45m".
|
||||
// Valid time units are "ns", "us" (or "µs"), "ms", "s", "m", "h".
|
||||
// a negative value means forever, 0 means cache is disabled.
|
||||
MaxAge time.Duration
|
||||
|
||||
// The directory where files are stored.
|
||||
Dir string
|
||||
DirCompiled string `json:"-"`
|
||||
|
||||
// Whether resources/_gen will get its own composite filesystem that
|
||||
// also checks any theme.
|
||||
IsResourceDir bool `json:"-"`
|
||||
isResourceDir bool
|
||||
}
|
||||
|
||||
// GetJSONCache gets the file cache for getJSON.
|
||||
func (f Caches) GetJSONCache() *Cache {
|
||||
return f[CacheKeyGetJSON]
|
||||
return f[cacheKeyGetJSON]
|
||||
}
|
||||
|
||||
// GetCSVCache gets the file cache for getCSV.
|
||||
func (f Caches) GetCSVCache() *Cache {
|
||||
return f[CacheKeyGetCSV]
|
||||
return f[cacheKeyGetCSV]
|
||||
}
|
||||
|
||||
// ImageCache gets the file cache for processed images.
|
||||
func (f Caches) ImageCache() *Cache {
|
||||
return f[CacheKeyImages]
|
||||
return f[cacheKeyImages]
|
||||
}
|
||||
|
||||
// ModulesCache gets the file cache for Hugo Modules.
|
||||
func (f Caches) ModulesCache() *Cache {
|
||||
return f[CacheKeyModules]
|
||||
return f[cacheKeyModules]
|
||||
}
|
||||
|
||||
// AssetsCache gets the file cache for assets (processed resources, SCSS etc.).
|
||||
func (f Caches) AssetsCache() *Cache {
|
||||
return f[CacheKeyAssets]
|
||||
return f[cacheKeyAssets]
|
||||
}
|
||||
|
||||
// MiscCache gets the file cache for miscellaneous stuff.
|
||||
func (f Caches) MiscCache() *Cache {
|
||||
return f[CacheKeyMisc]
|
||||
}
|
||||
|
||||
// GetResourceCache gets the file cache for remote resources.
|
||||
func (f Caches) GetResourceCache() *Cache {
|
||||
return f[CacheKeyGetResource]
|
||||
}
|
||||
|
||||
func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Configs, error) {
|
||||
func DecodeConfig(fs afero.Fs, cfg config.Provider) (Configs, error) {
|
||||
c := make(Configs)
|
||||
valid := make(map[string]bool)
|
||||
// Add defaults
|
||||
|
@ -144,12 +118,11 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
valid[k] = true
|
||||
}
|
||||
|
||||
m := cfg.GetStringMap(cachesConfigKey)
|
||||
|
||||
_, isOsFs := fs.(*afero.OsFs)
|
||||
|
||||
for k, v := range m {
|
||||
if _, ok := v.(maps.Params); !ok {
|
||||
continue
|
||||
}
|
||||
cc := defaultCacheConfig
|
||||
|
||||
dc := &mapstructure.DecoderConfig{
|
||||
|
@ -164,7 +137,7 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
}
|
||||
|
||||
if err := decoder.Decode(v); err != nil {
|
||||
return nil, fmt.Errorf("failed to decode filecache config: %w", err)
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if cc.Dir == "" {
|
||||
|
@ -173,12 +146,15 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
|
||||
name := strings.ToLower(k)
|
||||
if !valid[name] {
|
||||
return nil, fmt.Errorf("%q is not a valid cache name", name)
|
||||
return nil, errors.Errorf("%q is not a valid cache name", name)
|
||||
}
|
||||
|
||||
c[name] = cc
|
||||
}
|
||||
|
||||
// This is a very old flag in Hugo, but we need to respect it.
|
||||
disabled := cfg.GetBool("ignoreCache")
|
||||
|
||||
for k, v := range c {
|
||||
dir := filepath.ToSlash(filepath.Clean(v.Dir))
|
||||
hadSlash := strings.HasPrefix(dir, "/")
|
||||
|
@ -186,12 +162,12 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
|
||||
for i, part := range parts {
|
||||
if strings.HasPrefix(part, ":") {
|
||||
resolved, isResource, err := resolveDirPlaceholder(fs, bcfg, part)
|
||||
resolved, isResource, err := resolveDirPlaceholder(fs, cfg, part)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
if isResource {
|
||||
v.IsResourceDir = true
|
||||
v.isResourceDir = true
|
||||
}
|
||||
parts[i] = resolved
|
||||
}
|
||||
|
@ -201,29 +177,33 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
if hadSlash {
|
||||
dir = "/" + dir
|
||||
}
|
||||
v.DirCompiled = filepath.Clean(filepath.FromSlash(dir))
|
||||
v.Dir = filepath.Clean(filepath.FromSlash(dir))
|
||||
|
||||
if !v.IsResourceDir {
|
||||
if isOsFs && !filepath.IsAbs(v.DirCompiled) {
|
||||
return c, fmt.Errorf("%q must resolve to an absolute directory", v.DirCompiled)
|
||||
if !v.isResourceDir {
|
||||
if isOsFs && !filepath.IsAbs(v.Dir) {
|
||||
return c, errors.Errorf("%q must resolve to an absolute directory", v.Dir)
|
||||
}
|
||||
|
||||
// Avoid cache in root, e.g. / (Unix) or c:\ (Windows)
|
||||
if len(strings.TrimPrefix(v.DirCompiled, filepath.VolumeName(v.DirCompiled))) == 1 {
|
||||
return c, fmt.Errorf("%q is a root folder and not allowed as cache dir", v.DirCompiled)
|
||||
if len(strings.TrimPrefix(v.Dir, filepath.VolumeName(v.Dir))) == 1 {
|
||||
return c, errors.Errorf("%q is a root folder and not allowed as cache dir", v.Dir)
|
||||
}
|
||||
}
|
||||
|
||||
if !strings.HasPrefix(v.DirCompiled, "_gen") {
|
||||
if !strings.HasPrefix(v.Dir, "_gen") {
|
||||
// We do cache eviction (file removes) and since the user can set
|
||||
// his/hers own cache directory, we really want to make sure
|
||||
// we do not delete any files that do not belong to this cache.
|
||||
// We do add the cache name as the root, but this is an extra safe
|
||||
// guard. We skip the files inside /resources/_gen/ because
|
||||
// that would be breaking.
|
||||
v.DirCompiled = filepath.Join(v.DirCompiled, FilecacheRootDirname, k)
|
||||
v.Dir = filepath.Join(v.Dir, filecacheRootDirname, k)
|
||||
} else {
|
||||
v.DirCompiled = filepath.Join(v.DirCompiled, k)
|
||||
v.Dir = filepath.Join(v.Dir, k)
|
||||
}
|
||||
|
||||
if disabled {
|
||||
v.MaxAge = 0
|
||||
}
|
||||
|
||||
c[k] = v
|
||||
|
@ -233,15 +213,18 @@ func DecodeConfig(fs afero.Fs, bcfg config.BaseConfig, m map[string]any) (Config
|
|||
}
|
||||
|
||||
// Resolves :resourceDir => /myproject/resources etc., :cacheDir => ...
|
||||
func resolveDirPlaceholder(fs afero.Fs, bcfg config.BaseConfig, placeholder string) (cacheDir string, isResource bool, err error) {
|
||||
func resolveDirPlaceholder(fs afero.Fs, cfg config.Provider, placeholder string) (cacheDir string, isResource bool, err error) {
|
||||
workingDir := cfg.GetString("workingDir")
|
||||
|
||||
switch strings.ToLower(placeholder) {
|
||||
case ":resourcedir":
|
||||
return "", true, nil
|
||||
case ":cachedir":
|
||||
return bcfg.CacheDir, false, nil
|
||||
d, err := helpers.GetCacheDir(fs, cfg)
|
||||
return d, false, err
|
||||
case ":project":
|
||||
return filepath.Base(bcfg.WorkingDir), false, nil
|
||||
return filepath.Base(workingDir), false, nil
|
||||
}
|
||||
|
||||
return "", false, fmt.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
|
||||
return "", false, errors.Errorf("%q is not a valid placeholder (valid values are :cacheDir or :resourceDir)", placeholder)
|
||||
}
|
||||
|
|
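For orientation: the `dir` value for each cache may contain placeholders that `resolveDirPlaceholder` above expands. `:resourceDir` routes the cache onto the composite resources filesystem (so themes are also checked), `:cacheDir` resolves to Hugo's cache directory, and `:project` becomes the base name of the working directory; the default `":cacheDir/:project"` therefore keeps each project's file caches in its own folder.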
104 cache/filecache/filecache_config_test.go vendored
@@ -11,21 +11,21 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package filecache_test
|
||||
package filecache
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/spf13/afero"
|
||||
|
||||
"github.com/gohugoio/hugo/cache/filecache"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/config/testconfig"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
func TestDecodeConfig(t *testing.T) {
|
||||
|
@ -51,27 +51,25 @@ maxAge = "11h"
|
|||
dir = "/path/to/c2"
|
||||
[caches.images]
|
||||
dir = "/path/to/c3"
|
||||
[caches.getResource]
|
||||
dir = "/path/to/c4"
|
||||
|
||||
`
|
||||
|
||||
cfg, err := config.FromConfigString(configStr, "toml")
|
||||
c.Assert(err, qt.IsNil)
|
||||
fs := afero.NewMemMapFs()
|
||||
decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches
|
||||
c.Assert(len(decoded), qt.Equals, 7)
|
||||
decoded, err := DecodeConfig(fs, cfg)
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
c.Assert(len(decoded), qt.Equals, 5)
|
||||
|
||||
c2 := decoded["getcsv"]
|
||||
c.Assert(c2.MaxAge.String(), qt.Equals, "11h0m0s")
|
||||
c.Assert(c2.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c2/filecache/getcsv"))
|
||||
c.Assert(c2.Dir, qt.Equals, filepath.FromSlash("/path/to/c2/filecache/getcsv"))
|
||||
|
||||
c3 := decoded["images"]
|
||||
c.Assert(c3.MaxAge, qt.Equals, time.Duration(-1))
|
||||
c.Assert(c3.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
|
||||
c.Assert(c3.Dir, qt.Equals, filepath.FromSlash("/path/to/c3/filecache/images"))
|
||||
|
||||
c4 := decoded["getresource"]
|
||||
c.Assert(c4.MaxAge, qt.Equals, time.Duration(-1))
|
||||
c.Assert(c4.DirCompiled, qt.Equals, filepath.FromSlash("/path/to/c4/filecache/getresource"))
|
||||
}
|
||||
|
||||
func TestDecodeConfigIgnoreCache(t *testing.T) {
|
||||
|
@ -98,24 +96,26 @@ maxAge = 3456
|
|||
dir = "/path/to/c2"
|
||||
[caches.images]
|
||||
dir = "/path/to/c3"
|
||||
[caches.getResource]
|
||||
dir = "/path/to/c4"
|
||||
|
||||
`
|
||||
|
||||
cfg, err := config.FromConfigString(configStr, "toml")
|
||||
c.Assert(err, qt.IsNil)
|
||||
fs := afero.NewMemMapFs()
|
||||
decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches
|
||||
c.Assert(len(decoded), qt.Equals, 7)
|
||||
decoded, err := DecodeConfig(fs, cfg)
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
c.Assert(len(decoded), qt.Equals, 5)
|
||||
|
||||
for _, v := range decoded {
|
||||
c.Assert(v.MaxAge, qt.Equals, time.Duration(0))
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestDecodeConfigDefault(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
cfg := config.New()
|
||||
cfg := newTestConfig()
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
cfg.Set("resourceDir", "c:\\cache\\resources")
|
||||
|
@ -125,22 +125,72 @@ func TestDecodeConfigDefault(t *testing.T) {
|
|||
cfg.Set("resourceDir", "/cache/resources")
|
||||
cfg.Set("cacheDir", "/cache/thecache")
|
||||
}
|
||||
cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
|
||||
|
||||
fs := afero.NewMemMapFs()
|
||||
decoded := testconfig.GetTestConfigs(fs, cfg).Base.Caches
|
||||
c.Assert(len(decoded), qt.Equals, 7)
|
||||
|
||||
imgConfig := decoded[filecache.CacheKeyImages]
|
||||
jsonConfig := decoded[filecache.CacheKeyGetJSON]
|
||||
decoded, err := DecodeConfig(fs, cfg)
|
||||
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
c.Assert(len(decoded), qt.Equals, 5)
|
||||
|
||||
imgConfig := decoded[cacheKeyImages]
|
||||
jsonConfig := decoded[cacheKeyGetJSON]
|
||||
|
||||
if runtime.GOOS == "windows" {
|
||||
c.Assert(imgConfig.DirCompiled, qt.Equals, filepath.FromSlash("_gen/images"))
|
||||
c.Assert(imgConfig.Dir, qt.Equals, filepath.FromSlash("_gen/images"))
|
||||
} else {
|
||||
c.Assert(imgConfig.DirCompiled, qt.Equals, "_gen/images")
|
||||
c.Assert(jsonConfig.DirCompiled, qt.Equals, "/cache/thecache/hugoproject/filecache/getjson")
|
||||
c.Assert(imgConfig.Dir, qt.Equals, "_gen/images")
|
||||
c.Assert(jsonConfig.Dir, qt.Equals, "/cache/thecache/hugoproject/filecache/getjson")
|
||||
}
|
||||
|
||||
c.Assert(imgConfig.IsResourceDir, qt.Equals, true)
|
||||
c.Assert(jsonConfig.IsResourceDir, qt.Equals, false)
|
||||
c.Assert(imgConfig.isResourceDir, qt.Equals, true)
|
||||
c.Assert(jsonConfig.isResourceDir, qt.Equals, false)
|
||||
}
|
||||
|
||||
func TestDecodeConfigInvalidDir(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
c := qt.New(t)
|
||||
|
||||
configStr := `
|
||||
resourceDir = "myresources"
|
||||
contentDir = "content"
|
||||
dataDir = "data"
|
||||
i18nDir = "i18n"
|
||||
layoutDir = "layouts"
|
||||
assetDir = "assets"
|
||||
archeTypedir = "archetypes"
|
||||
|
||||
[caches]
|
||||
[caches.getJSON]
|
||||
maxAge = "10m"
|
||||
dir = "/"
|
||||
|
||||
`
|
||||
if runtime.GOOS == "windows" {
|
||||
configStr = strings.Replace(configStr, "/", "c:\\\\", 1)
|
||||
}
|
||||
|
||||
cfg, err := config.FromConfigString(configStr, "toml")
|
||||
c.Assert(err, qt.IsNil)
|
||||
fs := afero.NewMemMapFs()
|
||||
|
||||
_, err = DecodeConfig(fs, cfg)
|
||||
c.Assert(err, qt.Not(qt.IsNil))
|
||||
|
||||
}
|
||||
|
||||
func newTestConfig() *viper.Viper {
|
||||
cfg := viper.New()
|
||||
cfg.Set("workingDir", filepath.FromSlash("/my/cool/hugoproject"))
|
||||
cfg.Set("contentDir", "content")
|
||||
cfg.Set("dataDir", "data")
|
||||
cfg.Set("resourceDir", "resources")
|
||||
cfg.Set("i18nDir", "i18n")
|
||||
cfg.Set("layoutDir", "layouts")
|
||||
cfg.Set("archetypeDir", "archetypes")
|
||||
cfg.Set("assetDir", "assets")
|
||||
|
||||
return cfg
|
||||
}
|
||||
|
|
106 cache/filecache/filecache_integration_test.go vendored
@@ -1,106 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package filecache_test

import (
	"path/filepath"
	"testing"
	"time"

	"github.com/bep/logg"
	qt "github.com/frankban/quicktest"
	"github.com/gohugoio/hugo/htesting"
	"github.com/gohugoio/hugo/hugolib"
)

// See issue #10781. That issue wouldn't have been triggered if we kept
// the empty root directories (e.g. _resources/gen/images).
// It's still an upstream Go issue that we also need to handle, but
// this is a test for the first part.
func TestPruneShouldPreserveEmptyCacheRoots(t *testing.T) {
	files := `
-- hugo.toml --
baseURL = "https://example.com"
-- content/_index.md --
---
title: "Home"
---

`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files, RunGC: true, NeedsOsFS: true},
	).Build()

	_, err := b.H.BaseFs.ResourcesCache.Stat(filepath.Join("_gen", "images"))

	b.Assert(err, qt.IsNil)
}

func TestPruneImages(t *testing.T) {
	if htesting.IsCI() {
		// TODO(bep)
		t.Skip("skip flaky test on CI server")
	}
	t.Skip("skip flaky test")
	files := `
-- hugo.toml --
baseURL = "https://example.com"
[caches]
[caches.images]
maxAge = "200ms"
dir = ":resourceDir/_gen"
-- content/_index.md --
---
title: "Home"
---
-- assets/a/pixel.png --
iVBORw0KGgoAAAANSUhEUgAAAAEAAAABCAYAAAAfFcSJAAAADUlEQVR42mNkYPhfDwAChwGA60e6kgAAAABJRU5ErkJggg==
-- layouts/index.html --
{{ warnf "HOME!" }}
{{ $img := resources.GetMatch "**.png" }}
{{ $img = $img.Resize "3x3" }}
{{ $img.RelPermalink }}

`

	b := hugolib.NewIntegrationTestBuilder(
		hugolib.IntegrationTestConfig{T: t, TxtarString: files, Running: true, RunGC: true, NeedsOsFS: true, LogLevel: logg.LevelInfo},
	).Build()

	b.Assert(b.GCCount, qt.Equals, 0)
	b.Assert(b.H, qt.IsNotNil)

	imagesCacheDir := filepath.Join("_gen", "images")
	_, err := b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir)

	b.Assert(err, qt.IsNil)

	// TODO(bep) we need a way to test full rebuilds.
	// For now, just sleep a little so the cache elements expires.
	time.Sleep(500 * time.Millisecond)

	b.RenameFile("assets/a/pixel.png", "assets/b/pixel2.png").Build()

	b.Assert(b.GCCount, qt.Equals, 1)
	// Build it again to GC the empty a dir.
	b.Build()

	_, err = b.H.BaseFs.ResourcesCache.Stat(filepath.Join(imagesCacheDir, "a"))
	b.Assert(err, qt.Not(qt.IsNil))
	_, err = b.H.BaseFs.ResourcesCache.Stat(imagesCacheDir)
	b.Assert(err, qt.IsNil)
}
45 cache/filecache/filecache_pruner.go (vendored)
@@ -14,13 +14,10 @@
package filecache

import (
	"fmt"
	"io"
	"os"

	"github.com/gohugoio/hugo/common/herrors"
	"github.com/gohugoio/hugo/hugofs"

	"github.com/pkg/errors"
	"github.com/spf13/afero"
)

@@ -31,15 +28,16 @@ import (
func (c Caches) Prune() (int, error) {
	counter := 0
	for k, cache := range c {

		count, err := cache.Prune(false)

		counter += count

		if err != nil {
			if herrors.IsNotExist(err) {
			if os.IsNotExist(err) {
				continue
			}
			return counter, fmt.Errorf("failed to prune cache %q: %w", k, err)
			return counter, errors.Wrapf(err, "failed to prune cache %q", k)
		}

	}

@@ -53,9 +51,6 @@ func (c *Cache) Prune(force bool) (int, error) {
	if c.pruneAllRootDir != "" {
		return c.pruneRootDir(force)
	}
	if err := c.init(); err != nil {
		return 0, err
	}

	counter := 0

@@ -72,19 +67,14 @@ func (c *Cache) Prune(force bool) (int, error) {
			// This cache dir may not exist.
			return nil
		}
		defer f.Close()
		_, err = f.Readdirnames(1)
		f.Close()
		if err == io.EOF {
			// Empty dir.
			if name == "." {
				// e.g. /_gen/images -- keep it even if empty.
				err = nil
			} else {
				err = c.Fs.Remove(name)
			}
		}

		if err != nil && !herrors.IsNotExist(err) {
		if err != nil && !os.IsNotExist(err) {
			return err
		}

@@ -105,7 +95,7 @@ func (c *Cache) Prune(force bool) (int, error) {
			counter++
		}

		if err != nil && !herrors.IsNotExist(err) {
		if err != nil && !os.IsNotExist(err) {
			return err
		}

@@ -118,12 +108,10 @@ func (c *Cache) Prune(force bool) (int, error) {
}

func (c *Cache) pruneRootDir(force bool) (int, error) {
	if err := c.init(); err != nil {
		return 0, err
	}

	info, err := c.Fs.Stat(c.pruneAllRootDir)
	if err != nil {
		if herrors.IsNotExist(err) {
		if os.IsNotExist(err) {
			return 0, nil
		}
		return 0, err

@@ -133,5 +121,18 @@ func (c *Cache) pruneRootDir(force bool) (int, error) {
		return 0, nil
	}

	return hugofs.MakeReadableAndRemoveAllModulePkgDir(c.Fs, c.pruneAllRootDir)
	counter := 0
	// Module cache has 0555 directories; make them writable in order to remove content.
	afero.Walk(c.Fs, c.pruneAllRootDir, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return nil
		}
		if info.IsDir() {
			counter++
			c.Fs.Chmod(path, 0777)
		}
		return nil
	})
	return 1, c.Fs.RemoveAll(c.pruneAllRootDir)

}
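The older pruneRootDir shown above depends on the fact that Go's module cache is written with read-only (0555) directories, so the tree has to be made writable before it can be deleted. Below is a minimal standalone sketch of that chmod-then-remove pattern over an afero filesystem; the helper name removeReadOnlyTree is illustrative and not part of Hugo's API.

package main

import (
	"os"

	"github.com/spf13/afero"
)

// removeReadOnlyTree makes every directory under root writable, then removes
// the whole tree. A plain RemoveAll would fail on 0555 module-cache directories.
func removeReadOnlyTree(fs afero.Fs, root string) error {
	err := afero.Walk(fs, root, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			// Ignore walk errors here; the final RemoveAll reports real failures.
			return nil
		}
		if info.IsDir() {
			// Best-effort chmod so the directory contents can be deleted.
			_ = fs.Chmod(path, 0777)
		}
		return nil
	})
	if err != nil {
		return err
	}
	return fs.RemoveAll(root)
}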
20 cache/filecache/filecache_pruner_test.go (vendored)
@@ -11,14 +11,13 @@
// See the License for the specific language governing permissions and
// limitations under the License.

package filecache_test
package filecache

import (
	"fmt"
	"testing"
	"time"

	"github.com/gohugoio/hugo/cache/filecache"
	"github.com/spf13/afero"

	qt "github.com/frankban/quicktest"

@@ -53,13 +52,13 @@ maxAge = "200ms"
dir = ":resourceDir/_gen"
`

	for _, name := range []string{filecache.CacheKeyGetCSV, filecache.CacheKeyGetJSON, filecache.CacheKeyAssets, filecache.CacheKeyImages} {
	for _, name := range []string{cacheKeyGetCSV, cacheKeyGetJSON, cacheKeyAssets, cacheKeyImages} {
		msg := qt.Commentf("cache: %s", name)
		p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
		caches, err := filecache.NewCaches(p)
		caches, err := NewCaches(p)
		c.Assert(err, qt.IsNil)
		cache := caches[name]
		for i := range 10 {
		for i := 0; i < 10; i++ {
			id := fmt.Sprintf("i%d", i)
			cache.GetOrCreateBytes(id, func() ([]byte, error) {
				return []byte("abc"), nil

@@ -74,9 +73,9 @@ dir = ":resourceDir/_gen"
		c.Assert(err, qt.IsNil)
		c.Assert(count, qt.Equals, 5, msg)

		for i := range 10 {
		for i := 0; i < 10; i++ {
			id := fmt.Sprintf("i%d", i)
			v := cache.GetString(id)
			v := cache.getString(id)
			if i < 5 {
				c.Assert(v, qt.Equals, "")
			} else {

@@ -84,7 +83,7 @@ dir = ":resourceDir/_gen"
			}
		}

		caches, err = filecache.NewCaches(p)
		caches, err = NewCaches(p)
		c.Assert(err, qt.IsNil)
		cache = caches[name]
		// Touch one and then prune.

@@ -97,9 +96,9 @@ dir = ":resourceDir/_gen"
		c.Assert(count, qt.Equals, 4)

		// Now only the i5 should be left.
		for i := range 10 {
		for i := 0; i < 10; i++ {
			id := fmt.Sprintf("i%d", i)
			v := cache.GetString(id)
			v := cache.getString(id)
			if i != 5 {
				c.Assert(v, qt.Equals, "")
			} else {

@@ -108,4 +107,5 @@ dir = ":resourceDir/_gen"
		}

	}

}
128 cache/filecache/filecache_test.go (vendored)
@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@ -11,21 +11,25 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package filecache_test
|
||||
package filecache
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"sync"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/cache/filecache"
|
||||
"github.com/gohugoio/hugo/langs"
|
||||
"github.com/gohugoio/hugo/modules"
|
||||
|
||||
"github.com/gohugoio/hugo/common/hugio"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/config/testconfig"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
|
@ -38,8 +42,13 @@ func TestFileCache(t *testing.T) {
|
|||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
tempWorkingDir := t.TempDir()
|
||||
tempCacheDir := t.TempDir()
|
||||
tempWorkingDir, err := ioutil.TempDir("", "hugo_filecache_test_work")
|
||||
c.Assert(err, qt.IsNil)
|
||||
defer os.Remove(tempWorkingDir)
|
||||
|
||||
tempCacheDir, err := ioutil.TempDir("", "hugo_filecache_test_cache")
|
||||
c.Assert(err, qt.IsNil)
|
||||
defer os.Remove(tempCacheDir)
|
||||
|
||||
osfs := afero.NewOsFs()
|
||||
|
||||
|
@ -79,14 +88,31 @@ dir = ":cacheDir/c"
|
|||
|
||||
p := newPathsSpec(t, osfs, configStr)
|
||||
|
||||
caches, err := filecache.NewCaches(p)
|
||||
caches, err := NewCaches(p)
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
cache := caches.Get("GetJSON")
|
||||
c.Assert(cache, qt.Not(qt.IsNil))
|
||||
c.Assert(cache.maxAge.String(), qt.Equals, "10h0m0s")
|
||||
|
||||
bfs, ok := cache.Fs.(*afero.BasePathFs)
|
||||
c.Assert(ok, qt.Equals, true)
|
||||
filename, err := bfs.RealPath("key")
|
||||
c.Assert(err, qt.IsNil)
|
||||
if test.cacheDir != "" {
|
||||
c.Assert(filename, qt.Equals, filepath.Join(test.cacheDir, "c/"+filecacheRootDirname+"/getjson/key"))
|
||||
} else {
|
||||
// Temp dir.
|
||||
c.Assert(filename, qt.Matches, ".*hugo_cache.*"+filecacheRootDirname+".*key")
|
||||
}
|
||||
|
||||
cache = caches.Get("Images")
|
||||
c.Assert(cache, qt.Not(qt.IsNil))
|
||||
c.Assert(cache.maxAge, qt.Equals, time.Duration(-1))
|
||||
bfs, ok = cache.Fs.(*afero.BasePathFs)
|
||||
c.Assert(ok, qt.Equals, true)
|
||||
filename, _ = bfs.RealPath("key")
|
||||
c.Assert(filename, qt.Equals, filepath.FromSlash("_gen/images/key"))
|
||||
|
||||
rf := func(s string) func() (io.ReadCloser, error) {
|
||||
return func() (io.ReadCloser, error) {
|
||||
|
@ -95,7 +121,7 @@ dir = ":cacheDir/c"
|
|||
io.Closer
|
||||
}{
|
||||
strings.NewReader(s),
|
||||
io.NopCloser(nil),
|
||||
ioutil.NopCloser(nil),
|
||||
}, nil
|
||||
}
|
||||
}
|
||||
|
@ -104,13 +130,13 @@ dir = ":cacheDir/c"
|
|||
return []byte("bcd"), nil
|
||||
}
|
||||
|
||||
for _, ca := range []*filecache.Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
|
||||
for range 2 {
|
||||
for _, ca := range []*Cache{caches.ImageCache(), caches.AssetsCache(), caches.GetJSONCache(), caches.GetCSVCache()} {
|
||||
for i := 0; i < 2; i++ {
|
||||
info, r, err := ca.GetOrCreate("a", rf("abc"))
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(r, qt.Not(qt.IsNil))
|
||||
c.Assert(info.Name, qt.Equals, "a")
|
||||
b, _ := io.ReadAll(r)
|
||||
b, _ := ioutil.ReadAll(r)
|
||||
r.Close()
|
||||
c.Assert(string(b), qt.Equals, "abc")
|
||||
|
||||
|
@ -126,7 +152,7 @@ dir = ":cacheDir/c"
|
|||
|
||||
_, r, err = ca.GetOrCreate("a", rf("bcd"))
|
||||
c.Assert(err, qt.IsNil)
|
||||
b, _ = io.ReadAll(r)
|
||||
b, _ = ioutil.ReadAll(r)
|
||||
r.Close()
|
||||
c.Assert(string(b), qt.Equals, "abc")
|
||||
}
|
||||
|
@ -139,13 +165,13 @@ dir = ":cacheDir/c"
|
|||
c.Assert(info.Name, qt.Equals, "mykey")
|
||||
io.WriteString(w, "Hugo is great!")
|
||||
w.Close()
|
||||
c.Assert(caches.ImageCache().GetString("mykey"), qt.Equals, "Hugo is great!")
|
||||
c.Assert(caches.ImageCache().getString("mykey"), qt.Equals, "Hugo is great!")
|
||||
|
||||
info, r, err := caches.ImageCache().Get("mykey")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(r, qt.Not(qt.IsNil))
|
||||
c.Assert(info.Name, qt.Equals, "mykey")
|
||||
b, _ := io.ReadAll(r)
|
||||
b, _ := ioutil.ReadAll(r)
|
||||
r.Close()
|
||||
c.Assert(string(b), qt.Equals, "Hugo is great!")
|
||||
|
||||
|
@ -155,6 +181,7 @@ dir = ":cacheDir/c"
|
|||
c.Assert(string(b), qt.Equals, "Hugo is great!")
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestFileCacheConcurrent(t *testing.T) {
|
||||
|
@ -180,7 +207,7 @@ dir = "/cache/c"
|
|||
|
||||
p := newPathsSpec(t, afero.NewMemMapFs(), configStr)
|
||||
|
||||
caches, err := filecache.NewCaches(p)
|
||||
caches, err := NewCaches(p)
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
const cacheName = "getjson"
|
||||
|
@ -193,11 +220,11 @@ dir = "/cache/c"
|
|||
|
||||
var wg sync.WaitGroup
|
||||
|
||||
for i := range 50 {
|
||||
for i := 0; i < 50; i++ {
|
||||
wg.Add(1)
|
||||
go func(i int) {
|
||||
defer wg.Done()
|
||||
for range 20 {
|
||||
for j := 0; j < 20; j++ {
|
||||
ca := caches.Get(cacheName)
|
||||
c.Assert(ca, qt.Not(qt.IsNil))
|
||||
filename, data := filenameData(i)
|
||||
|
@ -205,7 +232,7 @@ dir = "/cache/c"
|
|||
return hugio.ToReadCloser(strings.NewReader(data)), nil
|
||||
})
|
||||
c.Assert(err, qt.IsNil)
|
||||
b, _ := io.ReadAll(r)
|
||||
b, _ := ioutil.ReadAll(r)
|
||||
r.Close()
|
||||
c.Assert(string(b), qt.Equals, data)
|
||||
// Trigger some expiration.
|
||||
|
@ -223,24 +250,25 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
|
|||
|
||||
var result string
|
||||
|
||||
rf := func(failLevel int) func(info filecache.ItemInfo, r io.ReadSeeker) error {
|
||||
return func(info filecache.ItemInfo, r io.ReadSeeker) error {
|
||||
rf := func(failLevel int) func(info ItemInfo, r io.ReadSeeker) error {
|
||||
|
||||
return func(info ItemInfo, r io.ReadSeeker) error {
|
||||
if failLevel > 0 {
|
||||
if failLevel > 1 {
|
||||
return filecache.ErrFatal
|
||||
return ErrFatal
|
||||
}
|
||||
return errors.New("fail")
|
||||
}
|
||||
|
||||
b, _ := io.ReadAll(r)
|
||||
b, _ := ioutil.ReadAll(r)
|
||||
result = string(b)
|
||||
|
||||
return nil
|
||||
}
|
||||
}
|
||||
|
||||
bf := func(s string) func(info filecache.ItemInfo, w io.WriteCloser) error {
|
||||
return func(info filecache.ItemInfo, w io.WriteCloser) error {
|
||||
bf := func(s string) func(info ItemInfo, w io.WriteCloser) error {
|
||||
return func(info ItemInfo, w io.WriteCloser) error {
|
||||
defer w.Close()
|
||||
result = s
|
||||
_, err := w.Write([]byte(s))
|
||||
|
@ -248,7 +276,7 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
cache := filecache.NewCache(afero.NewMemMapFs(), 100*time.Hour, "")
|
||||
cache := NewCache(afero.NewMemMapFs(), 100*time.Hour, "")
|
||||
|
||||
const id = "a32"
|
||||
|
||||
|
@ -262,15 +290,59 @@ func TestFileCacheReadOrCreateErrorInRead(t *testing.T) {
|
|||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(result, qt.Equals, "v3")
|
||||
_, err = cache.ReadOrCreate(id, rf(2), bf("v3"))
|
||||
c.Assert(err, qt.Equals, filecache.ErrFatal)
|
||||
c.Assert(err, qt.Equals, ErrFatal)
|
||||
}
|
||||
|
||||
func TestCleanID(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
c.Assert(cleanID(filepath.FromSlash("/a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
|
||||
c.Assert(cleanID(filepath.FromSlash("a/b//c.txt")), qt.Equals, filepath.FromSlash("a/b/c.txt"))
|
||||
}
|
||||
|
||||
func initConfig(fs afero.Fs, cfg config.Provider) error {
|
||||
if _, err := langs.LoadLanguageSettings(cfg, nil); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
modConfig, err := modules.DecodeConfig(cfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
workingDir := cfg.GetString("workingDir")
|
||||
themesDir := cfg.GetString("themesDir")
|
||||
if !filepath.IsAbs(themesDir) {
|
||||
themesDir = filepath.Join(workingDir, themesDir)
|
||||
}
|
||||
modulesClient := modules.NewClient(modules.ClientConfig{
|
||||
Fs: fs,
|
||||
WorkingDir: workingDir,
|
||||
ThemesDir: themesDir,
|
||||
ModuleConfig: modConfig,
|
||||
IgnoreVendor: true,
|
||||
})
|
||||
|
||||
moduleConfig, err := modulesClient.Collect()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := modules.ApplyProjectConfigDefaults(cfg, moduleConfig.ActiveModules[len(moduleConfig.ActiveModules)-1]); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
cfg.Set("allModules", moduleConfig.ActiveModules)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func newPathsSpec(t *testing.T, fs afero.Fs, configStr string) *helpers.PathSpec {
|
||||
c := qt.New(t)
|
||||
cfg, err := config.FromConfigString(configStr, "toml")
|
||||
c.Assert(err, qt.IsNil)
|
||||
acfg := testconfig.GetTestConfig(fs, cfg)
|
||||
p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, acfg.BaseConfig()), acfg, nil)
|
||||
initConfig(fs, cfg)
|
||||
p, err := helpers.NewPathSpec(hugofs.NewFrom(fs, cfg), cfg, nil)
|
||||
c.Assert(err, qt.IsNil)
|
||||
return p
|
||||
|
||||
}
|
||||
|
|
229 cache/httpcache/httpcache.go (vendored)
@ -1,229 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package httpcache
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"time"
|
||||
|
||||
"github.com/gobwas/glob"
|
||||
"github.com/gohugoio/hugo/common/predicate"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/mitchellh/mapstructure"
|
||||
)
|
||||
|
||||
// DefaultConfig holds the default configuration for the HTTP cache.
|
||||
var DefaultConfig = Config{
|
||||
Cache: Cache{
|
||||
For: GlobMatcher{
|
||||
Excludes: []string{"**"},
|
||||
},
|
||||
},
|
||||
Polls: []PollConfig{
|
||||
{
|
||||
For: GlobMatcher{
|
||||
Includes: []string{"**"},
|
||||
},
|
||||
Disable: true,
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
// Config holds the configuration for the HTTP cache.
|
||||
type Config struct {
|
||||
// Configures the HTTP cache behavior (RFC 9111).
|
||||
// When this is not enabled for a resource, Hugo will go straight to the file cache.
|
||||
Cache Cache
|
||||
|
||||
// Polls holds a list of configurations for polling remote resources to detect changes in watch mode.
|
||||
// This can be disabled for some resources, typically if they are known to not change.
|
||||
Polls []PollConfig
|
||||
}
|
||||
|
||||
type Cache struct {
|
||||
// Enable HTTP cache behavior (RFC 9111) for these resources.
|
||||
For GlobMatcher
|
||||
}
|
||||
|
||||
func (c *Config) Compile() (ConfigCompiled, error) {
|
||||
var cc ConfigCompiled
|
||||
|
||||
p, err := c.Cache.For.CompilePredicate()
|
||||
if err != nil {
|
||||
return cc, err
|
||||
}
|
||||
|
||||
cc.For = p
|
||||
|
||||
for _, pc := range c.Polls {
|
||||
|
||||
p, err := pc.For.CompilePredicate()
|
||||
if err != nil {
|
||||
return cc, err
|
||||
}
|
||||
|
||||
cc.PollConfigs = append(cc.PollConfigs, PollConfigCompiled{
|
||||
For: p,
|
||||
Config: pc,
|
||||
})
|
||||
}
|
||||
|
||||
return cc, nil
|
||||
}
|
||||
|
||||
// PollConfig holds the configuration for polling remote resources to detect changes in watch mode.
|
||||
type PollConfig struct {
|
||||
// What remote resources to apply this configuration to.
|
||||
For GlobMatcher
|
||||
|
||||
// Disable polling for this configuration.
|
||||
Disable bool
|
||||
|
||||
// Low is the lower bound for the polling interval.
|
||||
// This is the starting point when the resource has recently changed,
|
||||
// if that resource stops changing, the polling interval will gradually increase towards High.
|
||||
Low time.Duration
|
||||
|
||||
// High is the upper bound for the polling interval.
|
||||
// This is the interval used when the resource is stable.
|
||||
High time.Duration
|
||||
}
|
||||
|
||||
func (c PollConfig) MarshalJSON() (b []byte, err error) {
|
||||
// Marshal the durations as strings.
|
||||
type Alias PollConfig
|
||||
return json.Marshal(&struct {
|
||||
Low string
|
||||
High string
|
||||
Alias
|
||||
}{
|
||||
Low: c.Low.String(),
|
||||
High: c.High.String(),
|
||||
Alias: (Alias)(c),
|
||||
})
|
||||
}
|
||||
|
||||
type GlobMatcher struct {
|
||||
// Excludes holds a list of glob patterns that will be excluded.
|
||||
Excludes []string
|
||||
|
||||
// Includes holds a list of glob patterns that will be included.
|
||||
Includes []string
|
||||
}
|
||||
|
||||
func (gm GlobMatcher) IsZero() bool {
|
||||
return len(gm.Includes) == 0 && len(gm.Excludes) == 0
|
||||
}
|
||||
|
||||
type ConfigCompiled struct {
|
||||
For predicate.P[string]
|
||||
PollConfigs []PollConfigCompiled
|
||||
}
|
||||
|
||||
func (c *ConfigCompiled) PollConfigFor(s string) PollConfigCompiled {
|
||||
for _, pc := range c.PollConfigs {
|
||||
if pc.For(s) {
|
||||
return pc
|
||||
}
|
||||
}
|
||||
return PollConfigCompiled{}
|
||||
}
|
||||
|
||||
func (c *ConfigCompiled) IsPollingDisabled() bool {
|
||||
for _, pc := range c.PollConfigs {
|
||||
if !pc.Config.Disable {
|
||||
return false
|
||||
}
|
||||
}
|
||||
return true
|
||||
}
|
||||
|
||||
type PollConfigCompiled struct {
|
||||
For predicate.P[string]
|
||||
Config PollConfig
|
||||
}
|
||||
|
||||
func (p PollConfigCompiled) IsZero() bool {
|
||||
return p.For == nil
|
||||
}
|
||||
|
||||
func (gm *GlobMatcher) CompilePredicate() (func(string) bool, error) {
|
||||
if gm.IsZero() {
|
||||
panic("no includes or excludes")
|
||||
}
|
||||
var p predicate.P[string]
|
||||
for _, include := range gm.Includes {
|
||||
g, err := glob.Compile(include, '/')
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fn := func(s string) bool {
|
||||
return g.Match(s)
|
||||
}
|
||||
p = p.Or(fn)
|
||||
}
|
||||
|
||||
for _, exclude := range gm.Excludes {
|
||||
g, err := glob.Compile(exclude, '/')
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
fn := func(s string) bool {
|
||||
return !g.Match(s)
|
||||
}
|
||||
p = p.And(fn)
|
||||
}
|
||||
|
||||
return p, nil
|
||||
}
|
||||
|
||||
func DecodeConfig(_ config.BaseConfig, m map[string]any) (Config, error) {
|
||||
if len(m) == 0 {
|
||||
return DefaultConfig, nil
|
||||
}
|
||||
|
||||
var c Config
|
||||
|
||||
dc := &mapstructure.DecoderConfig{
|
||||
Result: &c,
|
||||
DecodeHook: mapstructure.StringToTimeDurationHookFunc(),
|
||||
WeaklyTypedInput: true,
|
||||
}
|
||||
|
||||
decoder, err := mapstructure.NewDecoder(dc)
|
||||
if err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
if err := decoder.Decode(m); err != nil {
|
||||
return c, err
|
||||
}
|
||||
|
||||
if c.Cache.For.IsZero() {
|
||||
c.Cache.For = DefaultConfig.Cache.For
|
||||
}
|
||||
|
||||
for pci := range c.Polls {
|
||||
if c.Polls[pci].For.IsZero() {
|
||||
c.Polls[pci].For = DefaultConfig.Cache.For
|
||||
c.Polls[pci].Disable = true
|
||||
}
|
||||
}
|
||||
|
||||
if len(c.Polls) == 0 {
|
||||
c.Polls = DefaultConfig.Polls
|
||||
}
|
||||
|
||||
return c, nil
|
||||
}
|
95 cache/httpcache/httpcache_integration_test.go (vendored)
@ -1,95 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package httpcache_test
|
||||
|
||||
import (
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
)
|
||||
|
||||
func TestConfigCustom(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
files := `
|
||||
-- hugo.toml --
|
||||
[httpcache]
|
||||
[httpcache.cache.for]
|
||||
includes = ["**gohugo.io**"]
|
||||
[[httpcache.polls]]
|
||||
low = "5s"
|
||||
high = "32s"
|
||||
[httpcache.polls.for]
|
||||
includes = ["**gohugo.io**"]
|
||||
|
||||
|
||||
`
|
||||
|
||||
b := hugolib.Test(t, files)
|
||||
|
||||
httpcacheConf := b.H.Configs.Base.HTTPCache
|
||||
compiled := b.H.Configs.Base.C.HTTPCache
|
||||
|
||||
b.Assert(httpcacheConf.Cache.For.Includes, qt.DeepEquals, []string{"**gohugo.io**"})
|
||||
b.Assert(httpcacheConf.Cache.For.Excludes, qt.IsNil)
|
||||
|
||||
pc := compiled.PollConfigFor("https://gohugo.io/foo.jpg")
|
||||
b.Assert(pc.Config.Low, qt.Equals, 5*time.Second)
|
||||
b.Assert(pc.Config.High, qt.Equals, 32*time.Second)
|
||||
b.Assert(compiled.PollConfigFor("https://example.com/foo.jpg").IsZero(), qt.IsTrue)
|
||||
}
|
||||
|
||||
func TestConfigDefault(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
files := `
|
||||
-- hugo.toml --
|
||||
`
|
||||
b := hugolib.Test(t, files)
|
||||
|
||||
compiled := b.H.Configs.Base.C.HTTPCache
|
||||
|
||||
b.Assert(compiled.For("https://gohugo.io/posts.json"), qt.IsFalse)
|
||||
b.Assert(compiled.For("https://gohugo.io/foo.jpg"), qt.IsFalse)
|
||||
b.Assert(compiled.PollConfigFor("https://gohugo.io/foo.jpg").Config.Disable, qt.IsTrue)
|
||||
}
|
||||
|
||||
func TestConfigPollsOnly(t *testing.T) {
|
||||
t.Parallel()
|
||||
files := `
|
||||
-- hugo.toml --
|
||||
[httpcache]
|
||||
[[httpcache.polls]]
|
||||
low = "5s"
|
||||
high = "32s"
|
||||
[httpcache.polls.for]
|
||||
includes = ["**gohugo.io**"]
|
||||
|
||||
|
||||
`
|
||||
|
||||
b := hugolib.Test(t, files)
|
||||
|
||||
compiled := b.H.Configs.Base.C.HTTPCache
|
||||
|
||||
b.Assert(compiled.For("https://gohugo.io/posts.json"), qt.IsFalse)
|
||||
b.Assert(compiled.For("https://gohugo.io/foo.jpg"), qt.IsFalse)
|
||||
|
||||
pc := compiled.PollConfigFor("https://gohugo.io/foo.jpg")
|
||||
b.Assert(pc.Config.Low, qt.Equals, 5*time.Second)
|
||||
b.Assert(pc.Config.High, qt.Equals, 32*time.Second)
|
||||
b.Assert(compiled.PollConfigFor("https://example.com/foo.jpg").IsZero(), qt.IsTrue)
|
||||
}
|
73 cache/httpcache/httpcache_test.go (vendored)
@ -1,73 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package httpcache
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
)
|
||||
|
||||
func TestGlobMatcher(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
g := GlobMatcher{
|
||||
Includes: []string{"**/*.jpg", "**.png", "**/bar/**"},
|
||||
Excludes: []string{"**/foo.jpg", "**.css"},
|
||||
}
|
||||
|
||||
p, err := g.CompilePredicate()
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
c.Assert(p("foo.jpg"), qt.IsFalse)
|
||||
c.Assert(p("foo.png"), qt.IsTrue)
|
||||
c.Assert(p("foo/bar.jpg"), qt.IsTrue)
|
||||
c.Assert(p("foo/bar.png"), qt.IsTrue)
|
||||
c.Assert(p("foo/bar/foo.jpg"), qt.IsFalse)
|
||||
c.Assert(p("foo/bar/foo.css"), qt.IsFalse)
|
||||
c.Assert(p("foo.css"), qt.IsFalse)
|
||||
c.Assert(p("foo/bar/foo.css"), qt.IsFalse)
|
||||
c.Assert(p("foo/bar/foo.xml"), qt.IsTrue)
|
||||
}
|
||||
|
||||
func TestDefaultConfig(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
_, err := DefaultConfig.Compile()
|
||||
c.Assert(err, qt.IsNil)
|
||||
}
|
||||
|
||||
func TestDecodeConfigInjectsDefaultAndCompiles(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
cfg, err := DecodeConfig(config.BaseConfig{}, map[string]interface{}{})
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(cfg, qt.DeepEquals, DefaultConfig)
|
||||
|
||||
_, err = cfg.Compile()
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
cfg, err = DecodeConfig(config.BaseConfig{}, map[string]any{
|
||||
"cache": map[string]any{
|
||||
"polls": []map[string]any{
|
||||
{"disable": true},
|
||||
},
|
||||
},
|
||||
})
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
_, err = cfg.Compile()
|
||||
c.Assert(err, qt.IsNil)
|
||||
}
|
79 cache/namedmemcache/named_cache.go (vendored, new file)
@@ -0,0 +1,79 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package namedmemcache provides a memory cache with a named lock. This is suitable
// for situations where creating the cached resource can be time consuming or otherwise
// resource hungry, or in situations where a "once only per key" is a requirement.
package namedmemcache

import (
	"sync"

	"github.com/BurntSushi/locker"
)

// Cache holds the cached values.
type Cache struct {
	nlocker *locker.Locker
	cache   map[string]cacheEntry
	mu      sync.RWMutex
}

type cacheEntry struct {
	value interface{}
	err   error
}

// New creates a new cache.
func New() *Cache {
	return &Cache{
		nlocker: locker.NewLocker(),
		cache:   make(map[string]cacheEntry),
	}
}

// Clear clears the cache state.
func (c *Cache) Clear() {
	c.mu.Lock()
	defer c.mu.Unlock()

	c.cache = make(map[string]cacheEntry)
	c.nlocker = locker.NewLocker()
}

// GetOrCreate tries to get the value with the given cache key, if not found
// create will be called and cached.
// This method is thread safe. It also guarantees that the create func for a given
// key is invoked only once for this cache.
func (c *Cache) GetOrCreate(key string, create func() (interface{}, error)) (interface{}, error) {
	c.mu.RLock()
	entry, found := c.cache[key]
	c.mu.RUnlock()

	if found {
		return entry.value, entry.err
	}

	c.nlocker.Lock(key)
	defer c.nlocker.Unlock(key)

	// Create it.
	value, err := create()

	c.mu.Lock()
	c.cache[key] = cacheEntry{value: value, err: err}
	c.mu.Unlock()

	return value, err
}
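The GetOrCreate contract documented above (thread safe, create invoked once per key) is easiest to see in a small usage sketch. The snippet below is illustrative only: the "user/42" key and the expensiveLookup function are made up, and only namedmemcache.New and GetOrCreate come from the file above.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/cache/namedmemcache"
)

func main() {
	c := namedmemcache.New()

	expensiveLookup := func() (interface{}, error) {
		// Imagine a slow computation or remote fetch here.
		return "computed-value", nil
	}

	// Concurrent callers with the same key block on the named lock,
	// so expensiveLookup runs at most once for "user/42".
	v, err := c.GetOrCreate("user/42", expensiveLookup)
	if err != nil {
		panic(err)
	}
	fmt.Println(v) // "computed-value"
}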
80 cache/namedmemcache/named_cache_test.go (vendored, new file)
@@ -0,0 +1,80 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package namedmemcache

import (
	"fmt"
	"sync"
	"testing"

	qt "github.com/frankban/quicktest"
)

func TestNamedCache(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	cache := New()

	counter := 0
	create := func() (interface{}, error) {
		counter++
		return counter, nil
	}

	for i := 0; i < 5; i++ {
		v1, err := cache.GetOrCreate("a1", create)
		c.Assert(err, qt.IsNil)
		c.Assert(v1, qt.Equals, 1)
		v2, err := cache.GetOrCreate("a2", create)
		c.Assert(err, qt.IsNil)
		c.Assert(v2, qt.Equals, 2)
	}

	cache.Clear()

	v3, err := cache.GetOrCreate("a2", create)
	c.Assert(err, qt.IsNil)
	c.Assert(v3, qt.Equals, 3)
}

func TestNamedCacheConcurrent(t *testing.T) {
	t.Parallel()

	c := qt.New(t)

	var wg sync.WaitGroup

	cache := New()

	create := func(i int) func() (interface{}, error) {
		return func() (interface{}, error) {
			return i, nil
		}
	}

	for i := 0; i < 10; i++ {
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := 0; j < 100; j++ {
				id := fmt.Sprintf("id%d", j)
				v, err := cache.GetOrCreate(id, create(j))
				c.Assert(err, qt.IsNil)
				c.Assert(v, qt.Equals, j)
			}
		}()
	}
	wg.Wait()
}
99 cache/partitioned_lazy_cache.go (vendored, new file)
@@ -0,0 +1,99 @@
// Copyright 2017-present The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package cache

import (
	"sync"
)

// Partition represents a cache partition where Load is the callback
// for when the partition is needed.
type Partition struct {
	Key  string
	Load func() (map[string]interface{}, error)
}

// Lazy represents a lazily loaded cache.
type Lazy struct {
	initSync sync.Once
	initErr  error
	cache    map[string]interface{}
	load     func() (map[string]interface{}, error)
}

// NewLazy creates a lazy cache with the given load func.
func NewLazy(load func() (map[string]interface{}, error)) *Lazy {
	return &Lazy{load: load}
}

func (l *Lazy) init() error {
	l.initSync.Do(func() {
		c, err := l.load()
		l.cache = c
		l.initErr = err
	})

	return l.initErr
}

// Get initializes the cache if not already initialized, then looks up the
// given key.
func (l *Lazy) Get(key string) (interface{}, bool, error) {
	l.init()
	if l.initErr != nil {
		return nil, false, l.initErr
	}
	v, found := l.cache[key]
	return v, found, nil
}

// PartitionedLazyCache is a lazily loaded cache partitioned by a supplied string key.
type PartitionedLazyCache struct {
	partitions map[string]*Lazy
}

// NewPartitionedLazyCache creates a new NewPartitionedLazyCache with the supplied
// partitions.
func NewPartitionedLazyCache(partitions ...Partition) *PartitionedLazyCache {
	lazyPartitions := make(map[string]*Lazy, len(partitions))
	for _, partition := range partitions {
		lazyPartitions[partition.Key] = NewLazy(partition.Load)
	}
	cache := &PartitionedLazyCache{partitions: lazyPartitions}

	return cache
}

// Get initializes the partition if not already done so, then looks up the given
// key in the given partition, returns nil if no value found.
func (c *PartitionedLazyCache) Get(partition, key string) (interface{}, error) {
	p, found := c.partitions[partition]

	if !found {
		return nil, nil
	}

	v, found, err := p.Get(key)
	if err != nil {
		return nil, err
	}

	if found {
		return v, nil
	}

	return nil, nil
}
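As the comments above describe, each partition's Load callback runs lazily on the first access, and unknown partitions or keys yield nil without an error. A small hedged usage sketch follows; the "authors" partition and its data are made up for illustration, and only the types and constructors come from the file above.

package main

import (
	"fmt"

	"github.com/gohugoio/hugo/cache"
)

func main() {
	authors := cache.Partition{
		Key: "authors",
		Load: func() (map[string]interface{}, error) {
			// Only executed on the first Get against the "authors" partition.
			return map[string]interface{}{"bep": "Bjørn Erik Pedersen"}, nil
		},
	}

	c := cache.NewPartitionedLazyCache(authors)

	v, err := c.Get("authors", "bep")
	if err != nil {
		panic(err)
	}
	fmt.Println(v) // "Bjørn Erik Pedersen"

	// Unknown partitions or keys return nil, nil rather than an error.
	missing, _ := c.Get("tags", "hugo")
	fmt.Println(missing == nil) // true
}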
138 cache/partitioned_lazy_cache_test.go (vendored, new file)
@ -0,0 +1,138 @@
|
|||
// Copyright 2017-present The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package cache
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"sync"
|
||||
"testing"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
)
|
||||
|
||||
func TestNewPartitionedLazyCache(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
c := qt.New(t)
|
||||
|
||||
p1 := Partition{
|
||||
Key: "p1",
|
||||
Load: func() (map[string]interface{}, error) {
|
||||
return map[string]interface{}{
|
||||
"p1_1": "p1v1",
|
||||
"p1_2": "p1v2",
|
||||
"p1_nil": nil,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
p2 := Partition{
|
||||
Key: "p2",
|
||||
Load: func() (map[string]interface{}, error) {
|
||||
return map[string]interface{}{
|
||||
"p2_1": "p2v1",
|
||||
"p2_2": "p2v2",
|
||||
"p2_3": "p2v3",
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
cache := NewPartitionedLazyCache(p1, p2)
|
||||
|
||||
v, err := cache.Get("p1", "p1_1")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.Equals, "p1v1")
|
||||
|
||||
v, err = cache.Get("p1", "p2_1")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.IsNil)
|
||||
|
||||
v, err = cache.Get("p1", "p1_nil")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.IsNil)
|
||||
|
||||
v, err = cache.Get("p2", "p2_3")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.Equals, "p2v3")
|
||||
|
||||
v, err = cache.Get("doesnotexist", "p1_1")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.IsNil)
|
||||
|
||||
v, err = cache.Get("p1", "doesnotexist")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.IsNil)
|
||||
|
||||
errorP := Partition{
|
||||
Key: "p3",
|
||||
Load: func() (map[string]interface{}, error) {
|
||||
return nil, errors.New("Failed")
|
||||
},
|
||||
}
|
||||
|
||||
cache = NewPartitionedLazyCache(errorP)
|
||||
|
||||
v, err = cache.Get("p1", "doesnotexist")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.IsNil)
|
||||
|
||||
_, err = cache.Get("p3", "doesnotexist")
|
||||
c.Assert(err, qt.Not(qt.IsNil))
|
||||
|
||||
}
|
||||
|
||||
func TestConcurrentPartitionedLazyCache(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
c := qt.New(t)
|
||||
|
||||
var wg sync.WaitGroup
|
||||
|
||||
p1 := Partition{
|
||||
Key: "p1",
|
||||
Load: func() (map[string]interface{}, error) {
|
||||
return map[string]interface{}{
|
||||
"p1_1": "p1v1",
|
||||
"p1_2": "p1v2",
|
||||
"p1_nil": nil,
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
p2 := Partition{
|
||||
Key: "p2",
|
||||
Load: func() (map[string]interface{}, error) {
|
||||
return map[string]interface{}{
|
||||
"p2_1": "p2v1",
|
||||
"p2_2": "p2v2",
|
||||
"p2_3": "p2v3",
|
||||
}, nil
|
||||
},
|
||||
}
|
||||
|
||||
cache := NewPartitionedLazyCache(p1, p2)
|
||||
|
||||
for i := 0; i < 100; i++ {
|
||||
wg.Add(1)
|
||||
go func() {
|
||||
defer wg.Done()
|
||||
for j := 0; j < 10; j++ {
|
||||
v, err := cache.Get("p1", "p1_1")
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(v, qt.Equals, "p1v1")
|
||||
}
|
||||
}()
|
||||
}
|
||||
wg.Wait()
|
||||
}
|
|
@@ -1,2 +0,0 @@
#!/usr/bin/env bash
diff <(gofmt -d .) <(printf '')
@ -26,7 +26,6 @@ import (
|
|||
"path/filepath"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"slices"
|
||||
"sort"
|
||||
"strings"
|
||||
"sync"
|
||||
|
@ -59,7 +58,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
|
||||
var methods Methods
|
||||
|
||||
excludes := make(map[string]bool)
|
||||
var excludes = make(map[string]bool)
|
||||
|
||||
if len(exclude) > 0 {
|
||||
for _, m := range c.MethodsFromTypes(exclude, nil) {
|
||||
|
@ -100,10 +99,12 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
name = pkgPrefix + name
|
||||
|
||||
return name, pkg
|
||||
|
||||
}
|
||||
|
||||
for _, t := range include {
|
||||
for i := range t.NumMethod() {
|
||||
|
||||
for i := 0; i < t.NumMethod(); i++ {
|
||||
|
||||
m := t.Method(i)
|
||||
if excludes[m.Name] || seen[m.Name] {
|
||||
|
@ -123,7 +124,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
|
||||
method := Method{Owner: t, OwnerName: ownerName, Name: m.Name}
|
||||
|
||||
for i := range numIn {
|
||||
for i := 0; i < numIn; i++ {
|
||||
in := m.Type.In(i)
|
||||
|
||||
name, pkg := nameAndPackage(in)
|
||||
|
@ -138,7 +139,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
numOut := m.Type.NumOut()
|
||||
|
||||
if numOut > 0 {
|
||||
for i := range numOut {
|
||||
for i := 0; i < numOut; i++ {
|
||||
out := m.Type.Out(i)
|
||||
name, pkg := nameAndPackage(out)
|
||||
|
||||
|
@ -152,6 +153,7 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
|
||||
methods = append(methods, method)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
sort.SliceStable(methods, func(i, j int) bool {
|
||||
|
@ -165,13 +167,16 @@ func (c *Inspector) MethodsFromTypes(include []reflect.Type, exclude []reflect.T
|
|||
}
|
||||
|
||||
return wi < wj
|
||||
|
||||
})
|
||||
|
||||
return methods
|
||||
|
||||
}
|
||||
|
||||
func (c *Inspector) parseSource() {
|
||||
c.init.Do(func() {
|
||||
|
||||
if !strings.Contains(c.ProjectRootDir, "hugo") {
|
||||
panic("dir must be set to the Hugo root")
|
||||
}
|
||||
|
@ -195,6 +200,7 @@ func (c *Inspector) parseSource() {
|
|||
filenames = append(filenames, path)
|
||||
|
||||
return nil
|
||||
|
||||
})
|
||||
|
||||
for _, filename := range filenames {
|
||||
|
@ -224,6 +230,7 @@ func (c *Inspector) parseSource() {
|
|||
c.methodWeight[iface] = weights
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return true
|
||||
})
|
||||
|
@ -240,6 +247,7 @@ func (c *Inspector) parseSource() {
|
|||
}
|
||||
}
|
||||
}
|
||||
|
||||
})
|
||||
}
|
||||
|
||||
|
@ -305,7 +313,7 @@ func (m Method) inOutStr() string {
|
|||
}
|
||||
|
||||
args := make([]string, len(m.In))
|
||||
for i := range args {
|
||||
for i := 0; i < len(args); i++ {
|
||||
args[i] = fmt.Sprintf("arg%d", i)
|
||||
}
|
||||
return "(" + strings.Join(args, ", ") + ")"
|
||||
|
@ -317,7 +325,7 @@ func (m Method) inStr() string {
|
|||
}
|
||||
|
||||
args := make([]string, len(m.In))
|
||||
for i := range args {
|
||||
for i := 0; i < len(args); i++ {
|
||||
args[i] = fmt.Sprintf("arg%d %s", i, m.In[i])
|
||||
}
|
||||
return "(" + strings.Join(args, ", ") + ")"
|
||||
|
@ -340,7 +348,7 @@ func (m Method) outStrNamed() string {
|
|||
}
|
||||
|
||||
outs := make([]string, len(m.Out))
|
||||
for i := range outs {
|
||||
for i := 0; i < len(outs); i++ {
|
||||
outs[i] = fmt.Sprintf("o%d %s", i, m.Out[i])
|
||||
}
|
||||
|
||||
|
@ -366,7 +374,7 @@ func (m Methods) Imports() []string {
|
|||
}
|
||||
|
||||
// ToMarshalJSON creates a MarshalJSON method for these methods. Any method name
|
||||
// matching any of the regexps in excludes will be ignored.
|
||||
// matchin any of the regexps in excludes will be ignored.
|
||||
func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (string, []string) {
|
||||
var sb strings.Builder
|
||||
|
||||
|
@ -377,7 +385,7 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
|
|||
fmt.Fprintf(&sb, "func Marshal%sToJSON(%s %s) ([]byte, error) {\n", what, r, receiver)
|
||||
|
||||
var methods Methods
|
||||
excludeRes := make([]*regexp.Regexp, len(excludes))
|
||||
var excludeRes = make([]*regexp.Regexp, len(excludes))
|
||||
|
||||
for i, exclude := range excludes {
|
||||
excludeRes[i] = regexp.MustCompile(exclude)
|
||||
|
@ -436,12 +444,13 @@ func (m Methods) ToMarshalJSON(receiver, pkgPath string, excludes ...string) (st
|
|||
// Exclude self
|
||||
for i, pkgImp := range pkgImports {
|
||||
if pkgImp == pkgPath {
|
||||
pkgImports = slices.Delete(pkgImports, i, i+1)
|
||||
pkgImports = append(pkgImports[:i], pkgImports[i+1:]...)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return sb.String(), pkgImports
|
||||
|
||||
}
|
||||
|
||||
func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
|
||||
|
@ -453,15 +462,12 @@ func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
|
|||
}
|
||||
|
||||
if ident, ok := m.Type.(*ast.Ident); ok && ident.Obj != nil {
|
||||
switch tt := ident.Obj.Decl.(*ast.TypeSpec).Type.(type) {
|
||||
case *ast.InterfaceType:
|
||||
// Embedded interface
|
||||
methodNames = append(
|
||||
methodNames,
|
||||
collectMethodsRecursive(
|
||||
pkg,
|
||||
tt.Methods.List)...)
|
||||
}
|
||||
ident.Obj.Decl.(*ast.TypeSpec).Type.(*ast.InterfaceType).Methods.List)...)
|
||||
} else {
|
||||
// Embedded, but in a different file/package. Return the
|
||||
// package.Name and deal with that later.
|
||||
|
@ -475,6 +481,7 @@ func collectMethodsRecursive(pkg string, f []*ast.Field) []string {
|
|||
}
|
||||
|
||||
return methodNames
|
||||
|
||||
}
|
||||
|
||||
func firstToLower(name string) string {
|
||||
|
@ -509,7 +516,7 @@ func typeName(name, pkg string) string {
|
|||
|
||||
func uniqueNonEmptyStrings(s []string) []string {
|
||||
var unique []string
|
||||
set := map[string]any{}
|
||||
set := map[string]interface{}{}
|
||||
for _, val := range s {
|
||||
if val == "" {
|
||||
continue
|
||||
|
@ -537,4 +544,5 @@ func varName(name string) string {
|
|||
}
|
||||
|
||||
return name
|
||||
|
||||
}
|
||||
|
|
|
@ -25,6 +25,7 @@ import (
|
|||
)
|
||||
|
||||
func TestMethods(t *testing.T) {
|
||||
|
||||
var (
|
||||
zeroIE = reflect.TypeOf((*IEmbed)(nil)).Elem()
|
||||
zeroIEOnly = reflect.TypeOf((*IEOnly)(nil)).Elem()
|
||||
|
@ -57,6 +58,7 @@ func TestMethods(t *testing.T) {
|
|||
methodsStr := fmt.Sprint(methods)
|
||||
|
||||
c.Assert(methodsStr, qt.Contains, "MethodEmbed3(arg0 string) string")
|
||||
|
||||
})
|
||||
|
||||
t.Run("ToMarshalJSON", func(t *testing.T) {
|
||||
|
@ -74,7 +76,9 @@ func TestMethods(t *testing.T) {
|
|||
c.Assert(pkg, qt.Contains, "encoding/json")
|
||||
|
||||
fmt.Println(pkg)
|
||||
|
||||
})
|
||||
|
||||
}
|
||||
|
||||
type I interface {
|
||||
|
@ -85,7 +89,7 @@ type I interface {
|
|||
Method3(myint int, mystring string)
|
||||
Method5() (string, error)
|
||||
Method6() *net.IP
|
||||
Method7() any
|
||||
Method7() interface{}
|
||||
Method8() herrors.ErrorContext
|
||||
method2()
|
||||
method9() os.FileInfo
|
||||
|
|
34 commands/check.go (new file)
@ -0,0 +1,34 @@
|
|||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// +build !darwin
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var _ cmder = (*checkCmd)(nil)
|
||||
|
||||
type checkCmd struct {
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newCheckCmd() *checkCmd {
|
||||
return &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
|
||||
Use: "check",
|
||||
Short: "Contains some verification checks",
|
||||
},
|
||||
}}
|
||||
}
|
36 commands/check_darwin.go (new file)
@ -0,0 +1,36 @@
|
|||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var _ cmder = (*checkCmd)(nil)
|
||||
|
||||
type checkCmd struct {
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newCheckCmd() *checkCmd {
|
||||
cc := &checkCmd{baseCmd: &baseCmd{cmd: &cobra.Command{
|
||||
Use: "check",
|
||||
Short: "Contains some verification checks",
|
||||
},
|
||||
}}
|
||||
|
||||
cc.cmd.AddCommand(newLimitCmd().getCommand())
|
||||
|
||||
return cc
|
||||
}
|
File diff suppressed because it is too large
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@ -14,60 +14,315 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"os"
|
||||
"time"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/hugolib/paths"
|
||||
|
||||
"github.com/gohugoio/hugo/common/hugo"
|
||||
"github.com/gohugoio/hugo/common/loggers"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
// newExec wires up all of Hugo's CLI.
|
||||
func newExec() (*simplecobra.Exec, error) {
|
||||
rootCmd := &rootCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
newHugoBuildCmd(),
|
||||
type commandsBuilder struct {
|
||||
hugoBuilderCommon
|
||||
|
||||
commands []cmder
|
||||
}
|
||||
|
||||
func newCommandsBuilder() *commandsBuilder {
|
||||
return &commandsBuilder{}
|
||||
}
|
||||
|
||||
func (b *commandsBuilder) addCommands(commands ...cmder) *commandsBuilder {
|
||||
b.commands = append(b.commands, commands...)
|
||||
return b
|
||||
}
|
||||
|
||||
func (b *commandsBuilder) addAll() *commandsBuilder {
|
||||
b.addCommands(
|
||||
b.newServerCmd(),
|
||||
newVersionCmd(),
|
||||
newEnvCommand(),
|
||||
newServerCommand(),
|
||||
newDeployCommand(),
|
||||
newConfigCommand(),
|
||||
newNewCommand(),
|
||||
newConvertCommand(),
|
||||
newImportCommand(),
|
||||
newListCommand(),
|
||||
newModCommands(),
|
||||
newGenCommand(),
|
||||
newReleaseCommand(),
|
||||
},
|
||||
newEnvCmd(),
|
||||
newConfigCmd(),
|
||||
newCheckCmd(),
|
||||
newDeployCmd(),
|
||||
newConvertCmd(),
|
||||
b.newNewCmd(),
|
||||
newListCmd(),
|
||||
newImportCmd(),
|
||||
newGenCmd(),
|
||||
createReleaser(),
|
||||
b.newModCmd(),
|
||||
)
|
||||
|
||||
return b
|
||||
}
|
||||
|
||||
return simplecobra.New(rootCmd)
|
||||
func (b *commandsBuilder) build() *hugoCmd {
|
||||
h := b.newHugoCmd()
|
||||
addCommands(h.getCommand(), b.commands...)
|
||||
return h
|
||||
}
|
||||
|
||||
func newHugoBuildCmd() simplecobra.Commander {
|
||||
return &hugoBuildCommand{}
|
||||
func addCommands(root *cobra.Command, commands ...cmder) {
|
||||
for _, command := range commands {
|
||||
cmd := command.getCommand()
|
||||
if cmd == nil {
|
||||
continue
|
||||
}
|
||||
root.AddCommand(cmd)
|
||||
}
|
||||
}
|
||||
|
||||
// hugoBuildCommand just delegates to the rootCommand.
|
||||
type hugoBuildCommand struct {
|
||||
rootCmd *rootCommand
|
||||
type baseCmd struct {
|
||||
cmd *cobra.Command
|
||||
}
|
||||
|
||||
func (c *hugoBuildCommand) Commands() []simplecobra.Commander {
|
||||
var _ commandsBuilderGetter = (*baseBuilderCmd)(nil)
|
||||
|
||||
// Used in tests.
|
||||
type commandsBuilderGetter interface {
|
||||
getCommandsBuilder() *commandsBuilder
|
||||
}
|
||||
type baseBuilderCmd struct {
|
||||
*baseCmd
|
||||
*commandsBuilder
|
||||
}
|
||||
|
||||
func (b *baseBuilderCmd) getCommandsBuilder() *commandsBuilder {
|
||||
return b.commandsBuilder
|
||||
}
|
||||
|
||||
func (c *baseCmd) getCommand() *cobra.Command {
|
||||
return c.cmd
|
||||
}
|
||||
|
||||
func newBaseCmd(cmd *cobra.Command) *baseCmd {
|
||||
return &baseCmd{cmd: cmd}
|
||||
}
|
||||
|
||||
func (b *commandsBuilder) newBuilderCmd(cmd *cobra.Command) *baseBuilderCmd {
|
||||
bcmd := &baseBuilderCmd{commandsBuilder: b, baseCmd: &baseCmd{cmd: cmd}}
|
||||
bcmd.hugoBuilderCommon.handleFlags(cmd)
|
||||
return bcmd
|
||||
}
|
||||
|
||||
func (c *baseCmd) flagsToConfig(cfg config.Provider) {
|
||||
initializeFlags(c.cmd, cfg)
|
||||
}
|
||||
|
||||
type hugoCmd struct {
|
||||
*baseBuilderCmd
|
||||
|
||||
// Need to get the sites once built.
|
||||
c *commandeer
|
||||
}
|
||||
|
||||
var _ cmder = (*nilCommand)(nil)
|
||||
|
||||
type nilCommand struct {
|
||||
}
|
||||
|
||||
func (c *nilCommand) getCommand() *cobra.Command {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *hugoBuildCommand) Name() string {
|
||||
return "build"
|
||||
func (c *nilCommand) flagsToConfig(cfg config.Provider) {
|
||||
|
||||
}
|
||||
|
||||
func (c *hugoBuildCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
c.rootCmd = cd.Root.Command.(*rootCommand)
|
||||
return c.rootCmd.initRootCommand("build", cd)
|
||||
func (b *commandsBuilder) newHugoCmd() *hugoCmd {
|
||||
cc := &hugoCmd{}
|
||||
|
||||
cc.baseBuilderCmd = b.newBuilderCmd(&cobra.Command{
|
||||
Use: "hugo",
|
||||
Short: "hugo builds your site",
|
||||
Long: `hugo is the main command, used to build your Hugo site.
|
||||
|
||||
Hugo is a Fast and Flexible Static Site Generator
|
||||
built with love by spf13 and friends in Go.
|
||||
|
||||
Complete documentation is available at http://gohugo.io/.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
defer cc.timeTrack(time.Now(), "Total")
|
||||
cfgInit := func(c *commandeer) error {
|
||||
if cc.buildWatch {
|
||||
c.Set("disableLiveReload", true)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *hugoBuildCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
return c.rootCmd.PreRun(cd, runner)
|
||||
c, err := initializeConfig(true, cc.buildWatch, &cc.hugoBuilderCommon, cc, cfgInit)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cc.c = c
|
||||
|
||||
return c.build()
|
||||
},
|
||||
})
|
||||
|
||||
cc.cmd.PersistentFlags().StringVar(&cc.cfgFile, "config", "", "config file (default is path/config.yaml|json|toml)")
|
||||
cc.cmd.PersistentFlags().StringVar(&cc.cfgDir, "configDir", "config", "config dir")
|
||||
cc.cmd.PersistentFlags().BoolVar(&cc.quiet, "quiet", false, "build in quiet mode")
|
||||
|
||||
// Set bash-completion
|
||||
_ = cc.cmd.PersistentFlags().SetAnnotation("config", cobra.BashCompFilenameExt, config.ValidConfigFileExtensions)
|
||||
|
||||
cc.cmd.PersistentFlags().BoolVarP(&cc.verbose, "verbose", "v", false, "verbose output")
|
||||
cc.cmd.PersistentFlags().BoolVarP(&cc.debug, "debug", "", false, "debug output")
|
||||
cc.cmd.PersistentFlags().BoolVar(&cc.logging, "log", false, "enable Logging")
|
||||
cc.cmd.PersistentFlags().StringVar(&cc.logFile, "logFile", "", "log File path (if set, logging enabled automatically)")
|
||||
cc.cmd.PersistentFlags().BoolVar(&cc.verboseLog, "verboseLog", false, "verbose logging")
|
||||
|
||||
cc.cmd.Flags().BoolVarP(&cc.buildWatch, "watch", "w", false, "watch filesystem for changes and recreate as needed")
|
||||
|
||||
cc.cmd.Flags().Bool("renderToMemory", false, "render to memory (only useful for benchmark testing)")
|
||||
|
||||
// Set bash-completion
|
||||
_ = cc.cmd.PersistentFlags().SetAnnotation("logFile", cobra.BashCompFilenameExt, []string{})
|
||||
|
||||
cc.cmd.SetGlobalNormalizationFunc(helpers.NormalizeHugoFlags)
|
||||
cc.cmd.SilenceUsage = true
|
||||
|
||||
return cc
|
||||
}
|
||||
|
||||
func (c *hugoBuildCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
return c.rootCmd.Run(ctx, cd, args)
|
||||
type hugoBuilderCommon struct {
|
||||
source string
|
||||
baseURL string
|
||||
environment string
|
||||
|
||||
buildWatch bool
|
||||
|
||||
gc bool
|
||||
|
||||
// Profile flags (for debugging of performance problems)
|
||||
cpuprofile string
|
||||
memprofile string
|
||||
mutexprofile string
|
||||
traceprofile string
|
||||
|
||||
// TODO(bep) var vs string
|
||||
logging bool
|
||||
verbose bool
|
||||
verboseLog bool
|
||||
debug bool
|
||||
quiet bool
|
||||
|
||||
cfgFile string
|
||||
cfgDir string
|
||||
logFile string
|
||||
}
|
||||
|
||||
func (cc *hugoBuilderCommon) timeTrack(start time.Time, name string) {
|
||||
if cc.quiet {
|
||||
return
|
||||
}
|
||||
elapsed := time.Since(start)
|
||||
fmt.Printf("%s in %v ms\n", name, int(1000*elapsed.Seconds()))
|
||||
}
|
||||
|
||||
func (cc *hugoBuilderCommon) getConfigDir(baseDir string) string {
|
||||
if cc.cfgDir != "" {
|
||||
return paths.AbsPathify(baseDir, cc.cfgDir)
|
||||
}
|
||||
|
||||
if v, found := os.LookupEnv("HUGO_CONFIGDIR"); found {
|
||||
return paths.AbsPathify(baseDir, v)
|
||||
}
|
||||
|
||||
return paths.AbsPathify(baseDir, "config")
|
||||
}
|
||||
|
||||
func (cc *hugoBuilderCommon) getEnvironment(isServer bool) string {
|
||||
if cc.environment != "" {
|
||||
return cc.environment
|
||||
}
|
||||
|
||||
if v, found := os.LookupEnv("HUGO_ENVIRONMENT"); found {
|
||||
return v
|
||||
}
|
||||
|
||||
// Used by Netlify and Forestry
|
||||
if v, found := os.LookupEnv("HUGO_ENV"); found {
|
||||
return v
|
||||
}
|
||||
|
||||
if isServer {
|
||||
return hugo.EnvironmentDevelopment
|
||||
}
|
||||
|
||||
return hugo.EnvironmentProduction
|
||||
}
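A minimal standalone sketch (not part of this diff or of Hugo) of the precedence that getEnvironment above implements: an explicit value wins, then HUGO_ENVIRONMENT, then HUGO_ENV, then a development default when serving and production otherwise. The helper name resolveEnvironment is illustrative only.

package main

import (
	"fmt"
	"os"
)

func resolveEnvironment(explicit string, isServer bool) string {
	if explicit != "" {
		return explicit
	}
	if v, found := os.LookupEnv("HUGO_ENVIRONMENT"); found {
		return v
	}
	if v, found := os.LookupEnv("HUGO_ENV"); found {
		return v
	}
	if isServer {
		return "development"
	}
	return "production"
}

func main() {
	os.Setenv("HUGO_ENV", "staging")
	fmt.Println(resolveEnvironment("", false))        // staging
	fmt.Println(resolveEnvironment("testing", false)) // testing
}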
|
||||
|
||||
func (cc *hugoBuilderCommon) handleCommonBuilderFlags(cmd *cobra.Command) {
|
||||
cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
|
||||
cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
|
||||
cmd.PersistentFlags().StringVarP(&cc.environment, "environment", "e", "", "build environment")
|
||||
cmd.PersistentFlags().StringP("themesDir", "", "", "filesystem path to themes directory")
|
||||
cmd.PersistentFlags().BoolP("ignoreVendor", "", false, "ignores any _vendor directory")
|
||||
}
|
||||
|
||||
func (cc *hugoBuilderCommon) handleFlags(cmd *cobra.Command) {
|
||||
cc.handleCommonBuilderFlags(cmd)
|
||||
cmd.Flags().Bool("cleanDestinationDir", false, "remove files from destination not found in static directories")
|
||||
cmd.Flags().BoolP("buildDrafts", "D", false, "include content marked as draft")
|
||||
cmd.Flags().BoolP("buildFuture", "F", false, "include content with publishdate in the future")
|
||||
cmd.Flags().BoolP("buildExpired", "E", false, "include expired content")
|
||||
cmd.Flags().StringP("contentDir", "c", "", "filesystem path to content directory")
|
||||
cmd.Flags().StringP("layoutDir", "l", "", "filesystem path to layout directory")
|
||||
cmd.Flags().StringP("cacheDir", "", "", "filesystem path to cache directory. Defaults: $TMPDIR/hugo_cache/")
|
||||
cmd.Flags().BoolP("ignoreCache", "", false, "ignores the cache directory")
|
||||
cmd.Flags().StringP("destination", "d", "", "filesystem path to write files to")
|
||||
cmd.Flags().StringSliceP("theme", "t", []string{}, "themes to use (located in /themes/THEMENAME/)")
|
||||
cmd.Flags().StringVarP(&cc.baseURL, "baseURL", "b", "", "hostname (and path) to the root, e.g. http://spf13.com/")
|
||||
cmd.Flags().Bool("enableGitInfo", false, "add Git revision, date and author info to the pages")
|
||||
cmd.Flags().BoolVar(&cc.gc, "gc", false, "enable to run some cleanup tasks (remove unused cache files) after the build")
|
||||
|
||||
cmd.Flags().Bool("templateMetrics", false, "display metrics about template executions")
|
||||
cmd.Flags().Bool("templateMetricsHints", false, "calculate some improvement hints when combined with --templateMetrics")
|
||||
cmd.Flags().BoolP("forceSyncStatic", "", false, "copy all files when static is changed.")
|
||||
cmd.Flags().BoolP("noTimes", "", false, "don't sync modification time of files")
|
||||
cmd.Flags().BoolP("noChmod", "", false, "don't sync permission mode of files")
|
||||
cmd.Flags().BoolP("i18n-warnings", "", false, "print missing translations")
|
||||
cmd.Flags().BoolP("path-warnings", "", false, "print warnings on duplicate target paths etc.")
|
||||
cmd.Flags().StringVarP(&cc.cpuprofile, "profile-cpu", "", "", "write cpu profile to `file`")
|
||||
cmd.Flags().StringVarP(&cc.memprofile, "profile-mem", "", "", "write memory profile to `file`")
|
||||
cmd.Flags().StringVarP(&cc.mutexprofile, "profile-mutex", "", "", "write Mutex profile to `file`")
|
||||
cmd.Flags().StringVarP(&cc.traceprofile, "trace", "", "", "write trace to `file` (not useful in general)")
|
||||
|
||||
// Hide these for now.
|
||||
cmd.Flags().MarkHidden("profile-cpu")
|
||||
cmd.Flags().MarkHidden("profile-mem")
|
||||
cmd.Flags().MarkHidden("profile-mutex")
|
||||
|
||||
cmd.Flags().StringSlice("disableKinds", []string{}, "disable different kind of pages (home, RSS etc.)")
|
||||
|
||||
cmd.Flags().Bool("minify", false, "minify any supported output format (HTML, XML etc.)")
|
||||
|
||||
// Set bash-completion.
|
||||
// Each flag must first be defined before using the SetAnnotation() call.
|
||||
_ = cmd.Flags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
|
||||
_ = cmd.Flags().SetAnnotation("cacheDir", cobra.BashCompSubdirsInDir, []string{})
|
||||
_ = cmd.Flags().SetAnnotation("destination", cobra.BashCompSubdirsInDir, []string{})
|
||||
_ = cmd.Flags().SetAnnotation("theme", cobra.BashCompSubdirsInDir, []string{"themes"})
|
||||
}
|
||||
|
||||
func checkErr(logger *loggers.Logger, err error, s ...string) {
|
||||
if err == nil {
|
||||
return
|
||||
}
|
||||
if len(s) == 0 {
|
||||
logger.CRITICAL.Println(err)
|
||||
return
|
||||
}
|
||||
for _, message := range s {
|
||||
logger.ERROR.Println(message)
|
||||
}
|
||||
logger.ERROR.Println(err)
|
||||
}
|
||||
|
|
286
commands/commands_test.go
Normal file
|
@@ -0,0 +1,286 @@
|
|||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/gohugoio/hugo/common/types"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
)
|
||||
|
||||
func TestExecute(t *testing.T) {
|
||||
|
||||
c := qt.New(t)
|
||||
|
||||
dir, err := createSimpleTestSite(t, testSiteConfig{})
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
defer func() {
|
||||
os.RemoveAll(dir)
|
||||
}()
|
||||
|
||||
resp := Execute([]string{"-s=" + dir})
|
||||
c.Assert(resp.Err, qt.IsNil)
|
||||
result := resp.Result
|
||||
c.Assert(len(result.Sites) == 1, qt.Equals, true)
|
||||
c.Assert(len(result.Sites[0].RegularPages()) == 1, qt.Equals, true)
|
||||
}
|
||||
|
||||
func TestCommandsPersistentFlags(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
noOpRunE := func(cmd *cobra.Command, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
tests := []struct {
|
||||
args []string
|
||||
check func(command []cmder)
|
||||
}{{[]string{"server",
|
||||
"--config=myconfig.toml",
|
||||
"--configDir=myconfigdir",
|
||||
"--contentDir=mycontent",
|
||||
"--disableKinds=page,home",
|
||||
"--environment=testing",
|
||||
"--configDir=myconfigdir",
|
||||
"--layoutDir=mylayouts",
|
||||
"--theme=mytheme",
|
||||
"--gc",
|
||||
"--themesDir=mythemes",
|
||||
"--cleanDestinationDir",
|
||||
"--navigateToChanged",
|
||||
"--disableLiveReload",
|
||||
"--noHTTPCache",
|
||||
"--i18n-warnings",
|
||||
"--destination=/tmp/mydestination",
|
||||
"-b=https://example.com/b/",
|
||||
"--port=1366",
|
||||
"--renderToDisk",
|
||||
"--source=mysource",
|
||||
"--path-warnings",
|
||||
}, func(commands []cmder) {
|
||||
var sc *serverCmd
|
||||
for _, command := range commands {
|
||||
if b, ok := command.(commandsBuilderGetter); ok {
|
||||
v := b.getCommandsBuilder().hugoBuilderCommon
|
||||
c.Assert(v.cfgFile, qt.Equals, "myconfig.toml")
|
||||
c.Assert(v.cfgDir, qt.Equals, "myconfigdir")
|
||||
c.Assert(v.source, qt.Equals, "mysource")
|
||||
c.Assert(v.baseURL, qt.Equals, "https://example.com/b/")
|
||||
}
|
||||
|
||||
if srvCmd, ok := command.(*serverCmd); ok {
|
||||
sc = srvCmd
|
||||
}
|
||||
}
|
||||
|
||||
c.Assert(sc, qt.Not(qt.IsNil))
|
||||
c.Assert(sc.navigateToChanged, qt.Equals, true)
|
||||
c.Assert(sc.disableLiveReload, qt.Equals, true)
|
||||
c.Assert(sc.noHTTPCache, qt.Equals, true)
|
||||
c.Assert(sc.renderToDisk, qt.Equals, true)
|
||||
c.Assert(sc.serverPort, qt.Equals, 1366)
|
||||
c.Assert(sc.environment, qt.Equals, "testing")
|
||||
|
||||
cfg := viper.New()
|
||||
sc.flagsToConfig(cfg)
|
||||
c.Assert(cfg.GetString("publishDir"), qt.Equals, "/tmp/mydestination")
|
||||
c.Assert(cfg.GetString("contentDir"), qt.Equals, "mycontent")
|
||||
c.Assert(cfg.GetString("layoutDir"), qt.Equals, "mylayouts")
|
||||
c.Assert(cfg.GetStringSlice("theme"), qt.DeepEquals, []string{"mytheme"})
|
||||
c.Assert(cfg.GetString("themesDir"), qt.Equals, "mythemes")
|
||||
c.Assert(cfg.GetString("baseURL"), qt.Equals, "https://example.com/b/")
|
||||
|
||||
c.Assert(cfg.Get("disableKinds"), qt.DeepEquals, []string{"page", "home"})
|
||||
|
||||
c.Assert(cfg.GetBool("gc"), qt.Equals, true)
|
||||
|
||||
// The flag is named path-warnings
|
||||
c.Assert(cfg.GetBool("logPathWarnings"), qt.Equals, true)
|
||||
|
||||
// The flag is named i18n-warnings
|
||||
c.Assert(cfg.GetBool("logI18nWarnings"), qt.Equals, true)
|
||||
|
||||
}}}
|
||||
|
||||
for _, test := range tests {
|
||||
b := newCommandsBuilder()
|
||||
root := b.addAll().build()
|
||||
|
||||
for _, c := range b.commands {
|
||||
if c.getCommand() == nil {
|
||||
continue
|
||||
}
|
||||
// We are only interested in the flag handling here.
|
||||
c.getCommand().RunE = noOpRunE
|
||||
}
|
||||
rootCmd := root.getCommand()
|
||||
rootCmd.SetArgs(test.args)
|
||||
c.Assert(rootCmd.Execute(), qt.IsNil)
|
||||
test.check(b.commands)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
func TestCommandsExecute(t *testing.T) {
|
||||
|
||||
c := qt.New(t)
|
||||
|
||||
dir, err := createSimpleTestSite(t, testSiteConfig{})
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
dirOut, err := ioutil.TempDir("", "hugo-cli-out")
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
defer func() {
|
||||
os.RemoveAll(dir)
|
||||
os.RemoveAll(dirOut)
|
||||
}()
|
||||
|
||||
sourceFlag := fmt.Sprintf("-s=%s", dir)
|
||||
|
||||
tests := []struct {
|
||||
commands []string
|
||||
flags []string
|
||||
expectErrToContain string
|
||||
}{
|
||||
// TODO(bep) permission issue on my OSX? "operation not permitted" {[]string{"check", "ulimit"}, nil, false},
|
||||
{[]string{"env"}, nil, ""},
|
||||
{[]string{"version"}, nil, ""},
|
||||
// no args = hugo build
|
||||
{nil, []string{sourceFlag}, ""},
|
||||
{nil, []string{sourceFlag, "--renderToMemory"}, ""},
|
||||
{[]string{"config"}, []string{sourceFlag}, ""},
|
||||
{[]string{"convert", "toTOML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "toml")}, ""},
|
||||
{[]string{"convert", "toYAML"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "yaml")}, ""},
|
||||
{[]string{"convert", "toJSON"}, []string{sourceFlag, "-o=" + filepath.Join(dirOut, "json")}, ""},
|
||||
{[]string{"gen", "autocomplete"}, []string{"--completionfile=" + filepath.Join(dirOut, "autocomplete.txt")}, ""},
|
||||
{[]string{"gen", "chromastyles"}, []string{"--style=manni"}, ""},
|
||||
{[]string{"gen", "doc"}, []string{"--dir=" + filepath.Join(dirOut, "doc")}, ""},
|
||||
{[]string{"gen", "man"}, []string{"--dir=" + filepath.Join(dirOut, "man")}, ""},
|
||||
{[]string{"list", "drafts"}, []string{sourceFlag}, ""},
|
||||
{[]string{"list", "expired"}, []string{sourceFlag}, ""},
|
||||
{[]string{"list", "future"}, []string{sourceFlag}, ""},
|
||||
{[]string{"new", "new-page.md"}, []string{sourceFlag}, ""},
|
||||
{[]string{"new", "site", filepath.Join(dirOut, "new-site")}, nil, ""},
|
||||
{[]string{"unknowncommand"}, nil, "unknown command"},
|
||||
// TODO(bep) cli refactor fix https://github.com/gohugoio/hugo/issues/4450
|
||||
//{[]string{"new", "theme", filepath.Join(dirOut, "new-theme")}, nil,false},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
b := newCommandsBuilder().addAll().build()
|
||||
hugoCmd := b.getCommand()
|
||||
test.flags = append(test.flags, "--quiet")
|
||||
hugoCmd.SetArgs(append(test.commands, test.flags...))
|
||||
|
||||
// TODO(bep) capture output and add some simple asserts
|
||||
// TODO(bep) misspelled subcommands do not return an error. We should investigate this
|
||||
// but before that, check for "Error: unknown command".
|
||||
|
||||
_, err := hugoCmd.ExecuteC()
|
||||
if test.expectErrToContain != "" {
|
||||
c.Assert(err, qt.Not(qt.IsNil))
|
||||
c.Assert(err.Error(), qt.Contains, test.expectErrToContain)
|
||||
} else {
|
||||
c.Assert(err, qt.IsNil)
|
||||
}
|
||||
|
||||
// Assert that we have not left any development debug artifacts in
|
||||
// the code.
|
||||
if b.c != nil {
|
||||
_, ok := b.c.destinationFs.(types.DevMarker)
|
||||
c.Assert(ok, qt.Equals, false)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
type testSiteConfig struct {
|
||||
configTOML string
|
||||
contentDir string
|
||||
}
|
||||
|
||||
func createSimpleTestSite(t *testing.T, cfg testSiteConfig) (string, error) {
|
||||
d, e := ioutil.TempDir("", "hugo-cli")
|
||||
if e != nil {
|
||||
return "", e
|
||||
}
|
||||
|
||||
cfgStr := `
|
||||
|
||||
baseURL = "https://example.org"
|
||||
title = "Hugo Commands"
|
||||
|
||||
`
|
||||
|
||||
contentDir := "content"
|
||||
|
||||
if cfg.configTOML != "" {
|
||||
cfgStr = cfg.configTOML
|
||||
}
|
||||
if cfg.contentDir != "" {
|
||||
contentDir = cfg.contentDir
|
||||
}
|
||||
|
||||
// Just the basics. These are for CLI tests, not site testing.
|
||||
writeFile(t, filepath.Join(d, "config.toml"), cfgStr)
|
||||
|
||||
writeFile(t, filepath.Join(d, contentDir, "p1.md"), `
|
||||
---
|
||||
title: "P1"
|
||||
weight: 1
|
||||
---
|
||||
|
||||
Content
|
||||
|
||||
`)
|
||||
|
||||
writeFile(t, filepath.Join(d, "layouts", "_default", "single.html"), `
|
||||
|
||||
Single: {{ .Title }}
|
||||
|
||||
`)
|
||||
|
||||
writeFile(t, filepath.Join(d, "layouts", "_default", "list.html"), `
|
||||
|
||||
List: {{ .Title }}
|
||||
Environment: {{ hugo.Environment }}
|
||||
|
||||
`)
|
||||
|
||||
return d, nil
|
||||
|
||||
}
|
||||
|
||||
func writeFile(t *testing.T, filename, content string) {
|
||||
must(t, os.MkdirAll(filepath.Dir(filename), os.FileMode(0755)))
|
||||
must(t, ioutil.WriteFile(filename, []byte(content), os.FileMode(0755)))
|
||||
}
|
||||
|
||||
func must(t *testing.T, err error) {
|
||||
if err != nil {
|
||||
t.Fatal(err)
|
||||
}
|
||||
}
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -9,231 +9,139 @@
|
|||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"reflect"
|
||||
"regexp"
|
||||
"sort"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/common/maps"
|
||||
"github.com/gohugoio/hugo/config/allconfig"
|
||||
"github.com/gohugoio/hugo/modules"
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||
|
||||
"github.com/gohugoio/hugo/modules"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
// newConfigCommand creates a new config command and its subcommands.
|
||||
func newConfigCommand() *configCommand {
|
||||
return &configCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
&configMountsCommand{},
|
||||
},
|
||||
}
|
||||
var _ cmder = (*configCmd)(nil)
|
||||
|
||||
type configCmd struct {
|
||||
hugoBuilderCommon
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
type configCommand struct {
|
||||
r *rootCommand
|
||||
func newConfigCmd() *configCmd {
|
||||
cc := &configCmd{}
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "config",
|
||||
Short: "Print the site configuration",
|
||||
Long: `Print the site configuration, both default and custom settings.`,
|
||||
RunE: cc.printConfig,
|
||||
})
|
||||
|
||||
format string
|
||||
lang string
|
||||
printZero bool
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
|
||||
|
||||
commands []simplecobra.Commander
|
||||
printMountsCmd := &cobra.Command{
|
||||
Use: "mounts",
|
||||
Short: "Print the configured file mounts",
|
||||
RunE: cc.printMounts,
|
||||
}
|
||||
|
||||
func (c *configCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
cc.cmd.AddCommand(printMountsCmd)
|
||||
|
||||
return cc
|
||||
}
|
||||
|
||||
func (c *configCommand) Name() string {
|
||||
return "config"
|
||||
}
|
||||
|
||||
func (c *configCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
conf, err := c.r.ConfigFromProvider(configKey{counter: c.r.configVersionID.Load()}, flagsToCfg(cd, nil))
|
||||
func (c *configCmd) printMounts(cmd *cobra.Command, args []string) error {
|
||||
cfg, err := initializeConfig(true, false, &c.hugoBuilderCommon, c, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var config *allconfig.Config
|
||||
if c.lang != "" {
|
||||
var found bool
|
||||
config, found = conf.configs.LanguageConfigMap[c.lang]
|
||||
if !found {
|
||||
return fmt.Errorf("language %q not found", c.lang)
|
||||
|
||||
allModules := cfg.Cfg.Get("allmodules").(modules.Modules)
|
||||
|
||||
for _, m := range allModules {
|
||||
if err := parser.InterfaceToConfig(&modMounts{m: m}, metadecoders.JSON, os.Stdout); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *configCmd) printConfig(cmd *cobra.Command, args []string) error {
|
||||
cfg, err := initializeConfig(true, false, &c.hugoBuilderCommon, c, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
allSettings := cfg.Cfg.(*viper.Viper).AllSettings()
|
||||
|
||||
// We need to clean this up, but we store objects in the config that
|
||||
// aren't really interesting to the end user, so filter these.
|
||||
ignoreKeysRe := regexp.MustCompile("client|sorted|filecacheconfigs|allmodules|multilingual")
|
||||
|
||||
separator := ": "
|
||||
|
||||
if len(cfg.configFiles) > 0 && strings.HasSuffix(cfg.configFiles[0], ".toml") {
|
||||
separator = " = "
|
||||
}
|
||||
|
||||
var keys []string
|
||||
for k := range allSettings {
|
||||
if ignoreKeysRe.MatchString(k) {
|
||||
continue
|
||||
}
|
||||
keys = append(keys, k)
|
||||
}
|
||||
sort.Strings(keys)
|
||||
for _, k := range keys {
|
||||
kv := reflect.ValueOf(allSettings[k])
|
||||
if kv.Kind() == reflect.String {
|
||||
jww.FEEDBACK.Printf("%s%s\"%+v\"\n", k, separator, allSettings[k])
|
||||
} else {
|
||||
config = conf.configs.LanguageConfigSlice[0]
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
dec := json.NewEncoder(&buf)
|
||||
dec.SetIndent("", " ")
|
||||
dec.SetEscapeHTML(false)
|
||||
|
||||
if err := dec.Encode(parser.ReplacingJSONMarshaller{Value: config, KeysToLower: true, OmitEmpty: !c.printZero}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
format := strings.ToLower(c.format)
|
||||
|
||||
switch format {
|
||||
case "json":
|
||||
os.Stdout.Write(buf.Bytes())
|
||||
default:
|
||||
// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
|
||||
var m map[string]any
|
||||
if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
|
||||
return err
|
||||
}
|
||||
maps.ConvertFloat64WithNoDecimalsToInt(m)
|
||||
switch format {
|
||||
case "yaml":
|
||||
return parser.InterfaceToConfig(m, metadecoders.YAML, os.Stdout)
|
||||
case "toml":
|
||||
return parser.InterfaceToConfig(m, metadecoders.TOML, os.Stdout)
|
||||
default:
|
||||
return fmt.Errorf("unsupported format: %q", format)
|
||||
jww.FEEDBACK.Printf("%s%s%+v\n", k, separator, allSettings[k])
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *configCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
c.r = cd.Root.Command.(*rootCommand)
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Display site configuration"
|
||||
cmd.Long = `Display site configuration, both default and custom settings.`
|
||||
cmd.Flags().StringVar(&c.format, "format", "toml", "preferred file format (toml, yaml or json)")
|
||||
_ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp))
|
||||
cmd.Flags().StringVar(&c.lang, "lang", "", "the language to display config for. Defaults to the first language defined.")
|
||||
cmd.Flags().BoolVar(&c.printZero, "printZero", false, `include config options with zero values (e.g. false, 0, "") in the output`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("lang", cobra.NoFileCompletions)
|
||||
applyLocalFlagsBuildConfig(cmd, c.r)
|
||||
|
||||
return nil
|
||||
type modMounts struct {
|
||||
m modules.Module
|
||||
}
|
||||
|
||||
func (c *configCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
type configModMount struct {
|
||||
type modMount struct {
|
||||
Source string `json:"source"`
|
||||
Target string `json:"target"`
|
||||
Lang string `json:"lang,omitempty"`
|
||||
}
|
||||
|
||||
type configModMounts struct {
|
||||
verbose bool
|
||||
m modules.Module
|
||||
}
|
||||
|
||||
// MarshalJSON is for internal use only.
|
||||
func (m *configModMounts) MarshalJSON() ([]byte, error) {
|
||||
var mounts []configModMount
|
||||
func (m *modMounts) MarshalJSON() ([]byte, error) {
|
||||
var mounts []modMount
|
||||
|
||||
for _, mount := range m.m.Mounts() {
|
||||
mounts = append(mounts, configModMount{
|
||||
mounts = append(mounts, modMount{
|
||||
Source: mount.Source,
|
||||
Target: mount.Target,
|
||||
Lang: mount.Lang,
|
||||
})
|
||||
}
|
||||
|
||||
var ownerPath string
|
||||
if m.m.Owner() != nil {
|
||||
ownerPath = m.m.Owner().Path()
|
||||
}
|
||||
|
||||
if m.verbose {
|
||||
config := m.m.Config()
|
||||
return json.Marshal(&struct {
|
||||
Path string `json:"path"`
|
||||
Version string `json:"version"`
|
||||
Time time.Time `json:"time"`
|
||||
Owner string `json:"owner"`
|
||||
Dir string `json:"dir"`
|
||||
Meta map[string]any `json:"meta"`
|
||||
HugoVersion modules.HugoVersion `json:"hugoVersion"`
|
||||
|
||||
Mounts []configModMount `json:"mounts"`
|
||||
Mounts []modMount `json:"mounts"`
|
||||
}{
|
||||
Path: m.m.Path(),
|
||||
Version: m.m.Version(),
|
||||
Time: m.m.Time(),
|
||||
Owner: ownerPath,
|
||||
Dir: m.m.Dir(),
|
||||
Meta: config.Params,
|
||||
HugoVersion: config.HugoVersion,
|
||||
Mounts: mounts,
|
||||
})
|
||||
}
|
||||
|
||||
return json.Marshal(&struct {
|
||||
Path string `json:"path"`
|
||||
Version string `json:"version"`
|
||||
Time time.Time `json:"time"`
|
||||
Owner string `json:"owner"`
|
||||
Dir string `json:"dir"`
|
||||
Mounts []configModMount `json:"mounts"`
|
||||
}{
|
||||
Path: m.m.Path(),
|
||||
Version: m.m.Version(),
|
||||
Time: m.m.Time(),
|
||||
Owner: ownerPath,
|
||||
Dir: m.m.Dir(),
|
||||
Mounts: mounts,
|
||||
})
|
||||
}
|
||||
|
||||
type configMountsCommand struct {
|
||||
r *rootCommand
|
||||
configCmd *configCommand
|
||||
}
|
||||
|
||||
func (c *configMountsCommand) Commands() []simplecobra.Commander {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *configMountsCommand) Name() string {
|
||||
return "mounts"
|
||||
}
|
||||
|
||||
func (c *configMountsCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
r := c.configCmd.r
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: c.r.configVersionID.Load()}, flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, m := range conf.configs.Modules {
|
||||
if err := parser.InterfaceToConfig(&configModMounts{m: m, verbose: r.isVerbose()}, metadecoders.JSON, os.Stdout); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *configMountsCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
c.r = cd.Root.Command.(*rootCommand)
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Print the configured file mounts"
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, c.r)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *configMountsCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.configCmd = cd.Parent.Command.(*configCommand)
|
||||
return nil
|
||||
}
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -15,149 +15,153 @@ package commands
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"fmt"
|
||||
"path/filepath"
|
||||
"io"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/resources/page"
|
||||
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||
"github.com/gohugoio/hugo/parser/pageparser"
|
||||
"github.com/gohugoio/hugo/resources/page"
|
||||
|
||||
"github.com/pkg/errors"
|
||||
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
|
||||
"path/filepath"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newConvertCommand() *convertCommand {
|
||||
var c *convertCommand
|
||||
c = &convertCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "toJSON",
|
||||
short: "Convert front matter to JSON",
|
||||
long: `toJSON converts all front matter in the content directory
|
||||
to use JSON for the front matter.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
return c.convertContents(metadecoders.JSON)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "toTOML",
|
||||
short: "Convert front matter to TOML",
|
||||
long: `toTOML converts all front matter in the content directory
|
||||
to use TOML for the front matter.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
return c.convertContents(metadecoders.TOML)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "toYAML",
|
||||
short: "Convert front matter to YAML",
|
||||
long: `toYAML converts all front matter in the content directory
|
||||
to use YAML for the front matter.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
return c.convertContents(metadecoders.YAML)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
return c
|
||||
}
|
||||
var (
|
||||
_ cmder = (*convertCmd)(nil)
|
||||
)
|
||||
|
||||
type convertCmd struct {
|
||||
hugoBuilderCommon
|
||||
|
||||
type convertCommand struct {
|
||||
// Flags.
|
||||
outputDir string
|
||||
unsafe bool
|
||||
|
||||
// Deps.
|
||||
r *rootCommand
|
||||
h *hugolib.HugoSites
|
||||
|
||||
// Commands.
|
||||
commands []simplecobra.Commander
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func (c *convertCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
func newConvertCmd() *convertCmd {
|
||||
cc := &convertCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "convert",
|
||||
Short: "Convert your content to different formats",
|
||||
Long: `Convert your content (e.g. front matter) to different formats.
|
||||
|
||||
See convert's subcommands toJSON, toTOML and toYAML for more information.`,
|
||||
RunE: nil,
|
||||
})
|
||||
|
||||
cc.cmd.AddCommand(
|
||||
&cobra.Command{
|
||||
Use: "toJSON",
|
||||
Short: "Convert front matter to JSON",
|
||||
Long: `toJSON converts all front matter in the content directory
|
||||
to use JSON for the front matter.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return cc.convertContents(metadecoders.JSON)
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "toTOML",
|
||||
Short: "Convert front matter to TOML",
|
||||
Long: `toTOML converts all front matter in the content directory
|
||||
to use TOML for the front matter.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return cc.convertContents(metadecoders.TOML)
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "toYAML",
|
||||
Short: "Convert front matter to YAML",
|
||||
Long: `toYAML converts all front matter in the content directory
|
||||
to use YAML for the front matter.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return cc.convertContents(metadecoders.YAML)
|
||||
},
|
||||
},
|
||||
)
|
||||
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.outputDir, "output", "o", "", "filesystem path to write files to")
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
|
||||
cc.cmd.PersistentFlags().BoolVar(&cc.unsafe, "unsafe", false, "enable less safe operations, please backup first")
|
||||
cc.cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
|
||||
|
||||
return cc
|
||||
}
|
||||
|
||||
func (c *convertCommand) Name() string {
|
||||
return "convert"
|
||||
func (cc *convertCmd) convertContents(format metadecoders.Format) error {
|
||||
if cc.outputDir == "" && !cc.unsafe {
|
||||
return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
|
||||
}
|
||||
|
||||
func (c *convertCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *convertCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Convert front matter to another format"
|
||||
cmd.Long = `Convert front matter to another format.
|
||||
|
||||
See convert's subcommands toJSON, toTOML and toYAML for more information.`
|
||||
|
||||
cmd.PersistentFlags().StringVarP(&c.outputDir, "output", "o", "", "filesystem path to write files to")
|
||||
_ = cmd.MarkFlagDirname("output")
|
||||
cmd.PersistentFlags().BoolVar(&c.unsafe, "unsafe", false, "enable less safe operations, please backup first")
|
||||
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *convertCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.r = cd.Root.Command.(*rootCommand)
|
||||
cfg := config.New()
|
||||
cfg.Set("buildDrafts", true)
|
||||
h, err := c.r.Hugo(flagsToCfg(cd, cfg))
|
||||
c, err := initializeConfig(true, false, &cc.hugoBuilderCommon, cc, nil)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
c.h = h
|
||||
|
||||
c.Cfg.Set("buildDrafts", true)
|
||||
|
||||
h, err := hugolib.NewHugoSites(*c.DepsCfg)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if err := h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
site := h.Sites[0]
|
||||
|
||||
site.Log.FEEDBACK.Println("processing", len(site.AllPages()), "content files")
|
||||
for _, p := range site.AllPages() {
|
||||
if err := cc.convertAndSavePage(p, site, format); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
|
||||
func (cc *convertCmd) convertAndSavePage(p page.Page, site *hugolib.Site, targetFormat metadecoders.Format) error {
|
||||
// The resources are not in .Site.AllPages.
|
||||
for _, r := range p.Resources().ByType("page") {
|
||||
if err := c.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
|
||||
if err := cc.convertAndSavePage(r.(page.Page), site, targetFormat); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
|
||||
if p.File() == nil {
|
||||
if p.File().IsZero() {
|
||||
// No content file.
|
||||
return nil
|
||||
}
|
||||
|
||||
errMsg := fmt.Errorf("error processing file %q", p.File().Path())
|
||||
errMsg := fmt.Errorf("Error processing file %q", p.Path())
|
||||
|
||||
site.Log.Infoln("attempting to convert", p.File().Filename())
|
||||
site.Log.INFO.Println("Attempting to convert", p.File().Filename())
|
||||
|
||||
f := p.File()
|
||||
file, err := f.FileInfo().Meta().Open()
|
||||
if err != nil {
|
||||
site.Log.Errorln(errMsg)
|
||||
site.Log.ERROR.Println(errMsg)
|
||||
file.Close()
|
||||
return nil
|
||||
}
|
||||
|
||||
pf, err := pageparser.ParseFrontMatterAndContent(file)
|
||||
pf, err := parseContentFile(file)
|
||||
if err != nil {
|
||||
site.Log.Errorln(errMsg)
|
||||
site.Log.ERROR.Println(errMsg)
|
||||
file.Close()
|
||||
return err
|
||||
}
|
||||
|
@@ -165,65 +169,82 @@ func (c *convertCommand) convertAndSavePage(p page.Page, site *hugolib.Site, tar
|
|||
file.Close()
|
||||
|
||||
// better handling of dates in formats that don't have support for them
|
||||
if pf.FrontMatterFormat == metadecoders.JSON || pf.FrontMatterFormat == metadecoders.YAML || pf.FrontMatterFormat == metadecoders.TOML {
|
||||
for k, v := range pf.FrontMatter {
|
||||
if pf.frontMatterFormat == metadecoders.JSON || pf.frontMatterFormat == metadecoders.YAML || pf.frontMatterFormat == metadecoders.TOML {
|
||||
for k, v := range pf.frontMatter {
|
||||
switch vv := v.(type) {
|
||||
case time.Time:
|
||||
pf.FrontMatter[k] = vv.Format(time.RFC3339)
|
||||
pf.frontMatter[k] = vv.Format(time.RFC3339)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
var newContent bytes.Buffer
|
||||
err = parser.InterfaceToFrontMatter(pf.FrontMatter, targetFormat, &newContent)
|
||||
err = parser.InterfaceToFrontMatter(pf.frontMatter, targetFormat, &newContent)
|
||||
if err != nil {
|
||||
site.Log.Errorln(errMsg)
|
||||
site.Log.ERROR.Println(errMsg)
|
||||
return err
|
||||
}
|
||||
|
||||
newContent.Write(pf.Content)
|
||||
newContent.Write(pf.content)
|
||||
|
||||
newFilename := p.File().Filename()
|
||||
|
||||
if c.outputDir != "" {
|
||||
contentDir := strings.TrimSuffix(newFilename, p.File().Path())
|
||||
if cc.outputDir != "" {
|
||||
contentDir := strings.TrimSuffix(newFilename, p.Path())
|
||||
contentDir = filepath.Base(contentDir)
|
||||
|
||||
newFilename = filepath.Join(c.outputDir, contentDir, p.File().Path())
|
||||
newFilename = filepath.Join(cc.outputDir, contentDir, p.Path())
|
||||
}
|
||||
|
||||
fs := hugofs.Os
|
||||
if err := helpers.WriteToDisk(newFilename, &newContent, fs); err != nil {
|
||||
return fmt.Errorf("failed to save file %q:: %w", newFilename, err)
|
||||
return errors.Wrapf(err, "Failed to save file %q:", newFilename)
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *convertCommand) convertContents(format metadecoders.Format) error {
|
||||
if c.outputDir == "" && !c.unsafe {
|
||||
return newUserError("Unsafe operation not allowed, use --unsafe or set a different output path")
|
||||
type parsedFile struct {
|
||||
frontMatterFormat metadecoders.Format
|
||||
frontMatterSource []byte
|
||||
frontMatter map[string]interface{}
|
||||
|
||||
// Everything after Front Matter
|
||||
content []byte
|
||||
}
|
||||
|
||||
if err := c.h.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
||||
return err
|
||||
func parseContentFile(r io.Reader) (parsedFile, error) {
|
||||
var pf parsedFile
|
||||
|
||||
psr, err := pageparser.Parse(r, pageparser.Config{})
|
||||
if err != nil {
|
||||
return pf, err
|
||||
}
|
||||
|
||||
site := c.h.Sites[0]
|
||||
iter := psr.Iterator()
|
||||
|
||||
var pagesBackedByFile page.Pages
|
||||
for _, p := range site.AllPages() {
|
||||
if p.File() == nil {
|
||||
continue
|
||||
walkFn := func(item pageparser.Item) bool {
|
||||
if pf.frontMatterSource != nil {
|
||||
// The rest is content.
|
||||
pf.content = psr.Input()[item.Pos:]
|
||||
// Done
|
||||
return false
|
||||
} else if item.IsFrontMatter() {
|
||||
pf.frontMatterFormat = metadecoders.FormatFromFrontMatterType(item.Type)
|
||||
pf.frontMatterSource = item.Val
|
||||
}
|
||||
pagesBackedByFile = append(pagesBackedByFile, p)
|
||||
return true
|
||||
|
||||
}
|
||||
|
||||
site.Log.Println("processing", len(pagesBackedByFile), "content files")
|
||||
for _, p := range site.AllPages() {
|
||||
if err := c.convertAndSavePage(p, site, format); err != nil {
|
||||
return err
|
||||
iter.PeekWalk(walkFn)
|
||||
|
||||
metadata, err := metadecoders.Default.UnmarshalToMap(pf.frontMatterSource, pf.frontMatterFormat)
|
||||
if err != nil {
|
||||
return pf, err
|
||||
}
|
||||
}
|
||||
return nil
|
||||
pf.frontMatter = metadata
|
||||
|
||||
return pf, nil
|
||||
|
||||
}
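A hedged usage sketch for parseContentFile as defined above; it assumes it runs inside package commands (the function and the parsedFile type are unexported), and the sample front matter is illustrative only.

package commands

import (
	"fmt"
	"strings"
)

// exampleParseContentFile is an illustrative helper, not part of Hugo.
func exampleParseContentFile() error {
	src := strings.NewReader("---\ntitle: \"P1\"\n---\n\nContent\n")
	pf, err := parseContentFile(src)
	if err != nil {
		return err
	}
	// frontMatterFormat holds the detected format, e.g. YAML for the input above.
	fmt.Println(pf.frontMatterFormat)
	// frontMatter holds the decoded keys; content is everything after the closing delimiter.
	fmt.Println(pf.frontMatter["title"])
	fmt.Printf("%s", pf.content)
	return nil
}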
|
||||
|
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -11,41 +11,67 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//go:build withdeploy
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
|
||||
"github.com/gohugoio/hugo/deploy"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newDeployCommand() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "deploy",
|
||||
short: "Deploy your site to a cloud provider",
|
||||
long: `Deploy your site to a cloud provider
|
||||
var _ cmder = (*deployCmd)(nil)
|
||||
|
||||
// deployCmd supports deploying sites to Cloud providers.
|
||||
type deployCmd struct {
|
||||
hugoBuilderCommon
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
// TODO: In addition to the "deploy" command, consider adding a "--deploy"
|
||||
// flag for the default command; this would build the site and then deploy it.
|
||||
// It's not obvious how to do this; would all of the deploy-specific flags
|
||||
// have to exist at the top level as well?
|
||||
|
||||
// TODO: The output files change every time "hugo" is executed, it looks
|
||||
// like because of map order randomization. This means that you can
|
||||
// run "hugo && hugo deploy" again and again and upload new stuff every time. Is
|
||||
// this intended?
|
||||
|
||||
func newDeployCmd() *deployCmd {
|
||||
cc := &deployCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "deploy",
|
||||
Short: "Deploy your site to a Cloud provider.",
|
||||
Long: `Deploy your site to a Cloud provider.
|
||||
|
||||
See https://gohugo.io/hosting-and-deployment/hugo-deploy/ for detailed
|
||||
documentation.
|
||||
`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.Hugo(flagsToCfgWithAdditionalConfigBase(cd, nil, "deployment"))
|
||||
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
cfgInit := func(c *commandeer) error {
|
||||
return nil
|
||||
}
|
||||
comm, err := initializeConfig(true, false, &cc.hugoBuilderCommon, cc, cfgInit)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
deployer, err := deploy.New(h.Configs.GetFirstLanguageConfig(), h.Log, h.PathSpec.PublishFs)
|
||||
deployer, err := deploy.New(comm.Cfg, comm.hugo().PathSpec.PublishFs)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return deployer.Deploy(ctx)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
applyDeployFlags(cmd, r)
|
||||
return deployer.Deploy(context.Background())
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
cc.cmd.Flags().String("target", "", "target deployment from deployments section in config file; defaults to the first one")
|
||||
cc.cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
|
||||
cc.cmd.Flags().Bool("dryRun", false, "dry run")
|
||||
cc.cmd.Flags().Bool("force", false, "force upload of all files")
|
||||
cc.cmd.Flags().Bool("invalidateCDN", true, "invalidate the CDN cache listed in the deployment target")
|
||||
cc.cmd.Flags().Int("maxDeletes", 256, "maximum # of files to delete, or -1 to disable")
|
||||
|
||||
return cc
|
||||
}
|
||||
|
|
|
@@ -1,33 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/gohugoio/hugo/deploy/deployconfig"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func applyDeployFlags(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.Flags().String("target", "", "target deployment from deployments section in config file; defaults to the first one")
|
||||
_ = cmd.RegisterFlagCompletionFunc("target", cobra.NoFileCompletions)
|
||||
cmd.Flags().Bool("confirm", false, "ask for confirmation before making changes to the target")
|
||||
cmd.Flags().Bool("dryRun", false, "dry run")
|
||||
cmd.Flags().Bool("force", false, "force upload of all files")
|
||||
cmd.Flags().Bool("invalidateCDN", deployconfig.DefaultConfig.InvalidateCDN, "invalidate the CDN cache listed in the deployment target")
|
||||
cmd.Flags().Int("maxDeletes", deployconfig.DefaultConfig.MaxDeletes, "maximum # of files to delete, or -1 to disable")
|
||||
_ = cmd.RegisterFlagCompletionFunc("maxDeletes", cobra.NoFileCompletions)
|
||||
cmd.Flags().Int("workers", deployconfig.DefaultConfig.Workers, "number of workers to transfer files. defaults to 10")
|
||||
_ = cmd.RegisterFlagCompletionFunc("workers", cobra.NoFileCompletions)
|
||||
}
|
|
@@ -1,50 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
//go:build !withdeploy
|
||||
|
||||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newDeployCommand() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "deploy",
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
return errors.New("deploy not supported in this version of Hugo; install a release with 'withdeploy' in the archive filename or build yourself with the 'withdeploy' build tag. Also see https://github.com/gohugoio/hugo/pull/12995")
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
applyDeployFlags(cmd, r)
|
||||
cmd.Hidden = true
|
||||
},
|
||||
}
|
||||
}
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2016 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -14,57 +14,31 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"runtime"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/common/hugo"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
func newEnvCommand() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "env",
|
||||
short: "Display version and environment info",
|
||||
long: "Display version and environment info. This is useful in Hugo bug reports",
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
r.Printf("%s\n", hugo.BuildVersionString())
|
||||
r.Printf("GOOS=%q\n", runtime.GOOS)
|
||||
r.Printf("GOARCH=%q\n", runtime.GOARCH)
|
||||
r.Printf("GOVERSION=%q\n", runtime.Version())
|
||||
var _ cmder = (*envCmd)(nil)
|
||||
|
||||
if r.isVerbose() {
|
||||
deps := hugo.GetDependencyList()
|
||||
for _, dep := range deps {
|
||||
r.Printf("%s\n", dep)
|
||||
}
|
||||
} else {
|
||||
// These are also included in the GetDependencyList above;
|
||||
// always print these as these are most likely the most useful to know about.
|
||||
deps := hugo.GetDependencyListNonGo()
|
||||
for _, dep := range deps {
|
||||
r.Printf("%s\n", dep)
|
||||
}
|
||||
}
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
}
|
||||
type envCmd struct {
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newVersionCmd() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "version",
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
r.Println(hugo.BuildVersionString())
|
||||
func newEnvCmd() *envCmd {
|
||||
return &envCmd{baseCmd: newBaseCmd(&cobra.Command{
|
||||
Use: "env",
|
||||
Short: "Print Hugo version and environment info",
|
||||
Long: `Print Hugo version and environment info. This is useful in Hugo bug reports.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
printHugoVersion()
|
||||
jww.FEEDBACK.Printf("GOOS=%q\n", runtime.GOOS)
|
||||
jww.FEEDBACK.Printf("GOARCH=%q\n", runtime.GOARCH)
|
||||
jww.FEEDBACK.Printf("GOVERSION=%q\n", runtime.Version())
|
||||
|
||||
return nil
|
||||
},
|
||||
short: "Display version",
|
||||
long: "Display version and environment info. This is useful in Hugo bug reports.",
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
}),
|
||||
}
|
||||
}
|
||||
|
|
296
commands/gen.go
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -14,290 +14,28 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"github.com/alecthomas/chroma/v2"
|
||||
"github.com/alecthomas/chroma/v2/formatters/html"
|
||||
"github.com/alecthomas/chroma/v2/styles"
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/common/hugo"
|
||||
"github.com/gohugoio/hugo/docshelper"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
"gopkg.in/yaml.v2"
|
||||
)
|
||||
|
||||
func newGenCommand() *genCommand {
|
||||
var (
|
||||
// Flags.
|
||||
gendocdir string
|
||||
genmandir string
|
||||
var _ cmder = (*genCmd)(nil)
|
||||
|
||||
// Chroma flags.
|
||||
style string
|
||||
highlightStyle string
|
||||
lineNumbersInlineStyle string
|
||||
lineNumbersTableStyle string
|
||||
omitEmpty bool
|
||||
)
|
||||
|
||||
newChromaStyles := func() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "chromastyles",
|
||||
short: "Generate CSS stylesheet for the Chroma code highlighter",
|
||||
long: `Generate CSS stylesheet for the Chroma code highlighter for a given style. This stylesheet is needed if markup.highlight.noClasses is disabled in config.
|
||||
|
||||
See https://xyproto.github.io/splash/docs/all.html for a preview of the available styles`,
|
||||
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
style = strings.ToLower(style)
|
||||
if !slices.Contains(styles.Names(), style) {
|
||||
return fmt.Errorf("invalid style: %s", style)
|
||||
}
|
||||
builder := styles.Get(style).Builder()
|
||||
if highlightStyle != "" {
|
||||
builder.Add(chroma.LineHighlight, highlightStyle)
|
||||
}
|
||||
if lineNumbersInlineStyle != "" {
|
||||
builder.Add(chroma.LineNumbers, lineNumbersInlineStyle)
|
||||
}
|
||||
if lineNumbersTableStyle != "" {
|
||||
builder.Add(chroma.LineNumbersTable, lineNumbersTableStyle)
|
||||
}
|
||||
style, err := builder.Build()
|
||||
if err != nil {
|
||||
return err
|
||||
type genCmd struct {
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
var formatter *html.Formatter
|
||||
if omitEmpty {
|
||||
formatter = html.New(html.WithClasses(true))
|
||||
} else {
|
||||
formatter = html.New(html.WithAllClasses(true))
|
||||
}
|
||||
|
||||
w := os.Stdout
|
||||
fmt.Fprintf(w, "/* Generated using: hugo %s */\n\n", strings.Join(os.Args[1:], " "))
|
||||
formatter.WriteCSS(w, style)
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.PersistentFlags().StringVar(&style, "style", "friendly", "highlighter style (see https://xyproto.github.io/splash/docs/)")
|
||||
_ = cmd.RegisterFlagCompletionFunc("style", cobra.NoFileCompletions)
|
||||
cmd.PersistentFlags().StringVar(&highlightStyle, "highlightStyle", "", `foreground and background colors for highlighted lines, e.g. --highlightStyle "#fff000 bg:#000fff"`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("highlightStyle", cobra.NoFileCompletions)
|
||||
cmd.PersistentFlags().StringVar(&lineNumbersInlineStyle, "lineNumbersInlineStyle", "", `foreground and background colors for inline line numbers, e.g. --lineNumbersInlineStyle "#fff000 bg:#000fff"`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("lineNumbersInlineStyle", cobra.NoFileCompletions)
|
||||
cmd.PersistentFlags().StringVar(&lineNumbersTableStyle, "lineNumbersTableStyle", "", `foreground and background colors for table line numbers, e.g. --lineNumbersTableStyle "#fff000 bg:#000fff"`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("lineNumbersTableStyle", cobra.NoFileCompletions)
|
||||
cmd.PersistentFlags().BoolVar(&omitEmpty, "omitEmpty", false, `omit empty CSS rules`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("omitEmpty", cobra.NoFileCompletions)
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
newMan := func() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "man",
|
||||
short: "Generate man pages for the Hugo CLI",
|
||||
long: `This command automatically generates up-to-date man pages of Hugo's
|
||||
command-line interface. By default, it creates the man page files
|
||||
in the "man" directory under the current directory.`,
|
||||
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
header := &doc.GenManHeader{
|
||||
Section: "1",
|
||||
Manual: "Hugo Manual",
|
||||
Source: fmt.Sprintf("Hugo %s", hugo.CurrentVersion),
|
||||
}
|
||||
if !strings.HasSuffix(genmandir, helpers.FilePathSeparator) {
|
||||
genmandir += helpers.FilePathSeparator
|
||||
}
|
||||
if found, _ := helpers.Exists(genmandir, hugofs.Os); !found {
|
||||
r.Println("Directory", genmandir, "does not exist, creating...")
|
||||
if err := hugofs.Os.MkdirAll(genmandir, 0o777); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
cd.CobraCommand.Root().DisableAutoGenTag = true
|
||||
|
||||
r.Println("Generating Hugo man pages in", genmandir, "...")
|
||||
doc.GenManTree(cd.CobraCommand.Root(), header, genmandir)
|
||||
|
||||
r.Println("Done.")
|
||||
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.PersistentFlags().StringVar(&genmandir, "dir", "man/", "the directory to write the man pages.")
|
||||
_ = cmd.MarkFlagDirname("dir")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
newGen := func() simplecobra.Commander {
|
||||
const gendocFrontmatterTemplate = `---
|
||||
title: "%s"
|
||||
slug: %s
|
||||
url: %s
|
||||
---
|
||||
`
|
||||
|
||||
return &simpleCommand{
|
||||
name: "doc",
|
||||
short: "Generate Markdown documentation for the Hugo CLI",
|
||||
long: `Generate Markdown documentation for the Hugo CLI.
|
||||
This command is, mostly, used to create up-to-date documentation
|
||||
of Hugo's command-line interface for https://gohugo.io/.
|
||||
|
||||
It creates one Markdown file per command with front matter suitable
|
||||
for rendering in Hugo.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
cd.CobraCommand.VisitParents(func(c *cobra.Command) {
|
||||
// Disable the "Auto generated by spf13/cobra on DATE"
|
||||
// as it creates a lot of diffs.
|
||||
c.DisableAutoGenTag = true
|
||||
func newGenCmd() *genCmd {
|
||||
cc := &genCmd{}
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "gen",
|
||||
Short: "A collection of several useful generators.",
|
||||
})
|
||||
if !strings.HasSuffix(gendocdir, helpers.FilePathSeparator) {
|
||||
gendocdir += helpers.FilePathSeparator
|
||||
}
|
||||
if found, _ := helpers.Exists(gendocdir, hugofs.Os); !found {
|
||||
r.Println("Directory", gendocdir, "does not exist, creating...")
|
||||
if err := hugofs.Os.MkdirAll(gendocdir, 0o777); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
prepender := func(filename string) string {
|
||||
name := filepath.Base(filename)
|
||||
base := strings.TrimSuffix(name, path.Ext(name))
|
||||
url := "/docs/reference/commands/" + strings.ToLower(base) + "/"
|
||||
return fmt.Sprintf(gendocFrontmatterTemplate, strings.Replace(base, "_", " ", -1), base, url)
|
||||
}
|
||||
|
||||
linkHandler := func(name string) string {
|
||||
base := strings.TrimSuffix(name, path.Ext(name))
|
||||
return "/docs/reference/commands/" + strings.ToLower(base) + "/"
|
||||
}
|
||||
r.Println("Generating Hugo command-line documentation in", gendocdir, "...")
|
||||
doc.GenMarkdownTreeCustom(cd.CobraCommand.Root(), gendocdir, prepender, linkHandler)
|
||||
r.Println("Done.")
|
||||
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.PersistentFlags().StringVar(&gendocdir, "dir", "/tmp/hugodoc/", "the directory to write the doc.")
|
||||
_ = cmd.MarkFlagDirname("dir")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
var docsHelperTarget string
|
||||
|
||||
newDocsHelper := func() simplecobra.Commander {
|
||||
return &simpleCommand{
|
||||
name: "docshelper",
|
||||
short: "Generate some data files for the Hugo docs",
|
||||
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
r.Println("Generate docs data to", docsHelperTarget)
|
||||
|
||||
var buf bytes.Buffer
|
||||
jsonEnc := json.NewEncoder(&buf)
|
||||
|
||||
configProvider := func() docshelper.DocProvider {
|
||||
conf := hugolib.DefaultConfig()
|
||||
conf.CacheDir = "" // The default value does not make sense in the docs.
|
||||
defaultConfig := parser.NullBoolJSONMarshaller{Wrapped: parser.LowerCaseCamelJSONMarshaller{Value: conf}}
|
||||
return docshelper.DocProvider{"config": defaultConfig}
|
||||
}
|
||||
|
||||
docshelper.AddDocProviderFunc(configProvider)
|
||||
if err := jsonEnc.Encode(docshelper.GetDocProvider()); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Decode the JSON to a map[string]interface{} and then unmarshal it again to the correct format.
|
||||
var m map[string]any
|
||||
if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
targetFile := filepath.Join(docsHelperTarget, "docs.yaml")
|
||||
|
||||
f, err := os.Create(targetFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
yamlEnc := yaml.NewEncoder(f)
|
||||
if err := yamlEnc.Encode(m); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r.Println("Done!")
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.Hidden = true
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.PersistentFlags().StringVarP(&docsHelperTarget, "dir", "", "docs/data", "data dir")
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
return &genCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
newChromaStyles(),
|
||||
newGen(),
|
||||
newMan(),
|
||||
newDocsHelper(),
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type genCommand struct {
|
||||
rootCmd *rootCommand
|
||||
|
||||
commands []simplecobra.Commander
|
||||
}
|
||||
|
||||
func (c *genCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
func (c *genCommand) Name() string {
|
||||
return "gen"
|
||||
}
|
||||
|
||||
func (c *genCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *genCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Generate documentation and syntax highlighting styles"
|
||||
cmd.Long = "Generate documentation for your project using Hugo's documentation engine, including syntax highlighting for various programming languages."
|
||||
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *genCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.rootCmd = cd.Root.Command.(*rootCommand)
|
||||
return nil
|
||||
cc.cmd.AddCommand(
|
||||
newGenautocompleteCmd().getCommand(),
|
||||
newGenDocCmd().getCommand(),
|
||||
newGenManCmd().getCommand(),
|
||||
createGenDocsHelper().getCommand(),
|
||||
createGenChromaStyles().getCommand())
|
||||
|
||||
return cc
|
||||
}
|
||||
|
|
80
commands/genautocomplete.go
Normal file
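commands/genautocomplete.go below wires up `hugo gen autocomplete`, which ultimately asks cobra to emit a Bash completion script for the root command. A rough standalone sketch of that call (not Hugo's actual wiring; the command tree and output path are made up):

package main

import (
	"log"

	"github.com/spf13/cobra"
)

func main() {
	// A tiny stand-in command tree; Hugo's real root command has many more subcommands.
	root := &cobra.Command{Use: "hugo"}
	root.AddCommand(&cobra.Command{Use: "version", Run: func(*cobra.Command, []string) {}})

	// Same cobra call the autocomplete command uses; the output path is hypothetical.
	if err := root.GenBashCompletionFile("/tmp/hugo.sh"); err != nil {
		log.Fatal(err)
	}
}

Sourcing the generated file (or installing it under /etc/bash_completion.d, the command's default target) enables tab completion for the commands and flags defined on the tree.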
|
@@ -0,0 +1,80 @@
|
|||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
var _ cmder = (*genautocompleteCmd)(nil)
|
||||
|
||||
type genautocompleteCmd struct {
|
||||
autocompleteTarget string
|
||||
|
||||
// bash for now (zsh and others will come)
|
||||
autocompleteType string
|
||||
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newGenautocompleteCmd() *genautocompleteCmd {
|
||||
cc := &genautocompleteCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "autocomplete",
|
||||
Short: "Generate shell autocompletion script for Hugo",
|
||||
Long: `Generates a shell autocompletion script for Hugo.
|
||||
|
||||
NOTE: The current version supports Bash only.
|
||||
This should work for *nix systems with Bash installed.
|
||||
|
||||
By default, the file is written directly to /etc/bash_completion.d
|
||||
for convenience, and the command may need superuser rights, e.g.:
|
||||
|
||||
$ sudo hugo gen autocomplete
|
||||
|
||||
Add ` + "`--completionfile=/path/to/file`" + ` flag to set alternative
|
||||
file-path and name.
|
||||
|
||||
Logout and in again to reload the completion scripts,
|
||||
or just source them in directly:
|
||||
|
||||
$ . /etc/bash_completion`,
|
||||
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
if cc.autocompleteType != "bash" {
|
||||
return newUserError("Only Bash is supported for now")
|
||||
}
|
||||
|
||||
err := cmd.Root().GenBashCompletionFile(cc.autocompleteTarget)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
jww.FEEDBACK.Println("Bash completion file for Hugo saved to", cc.autocompleteTarget)
|
||||
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.autocompleteTarget, "completionfile", "", "/etc/bash_completion.d/hugo.sh", "autocompletion file")
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.autocompleteType, "type", "", "bash", "autocompletion type (currently only bash supported)")
|
||||
|
||||
// For bash-completion
|
||||
cc.cmd.PersistentFlags().SetAnnotation("completionfile", cobra.BashCompFilenameExt, []string{})
|
||||
|
||||
return cc
|
||||
}
|
74
commands/genchromastyles.go
Normal file
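The old commands/genchromastyles.go below and the newer chromastyles subcommand in gen.go above share the same core: build a Chroma style, then emit class-based CSS with the HTML formatter. A minimal standalone sketch, assuming the chroma/v2 import paths used by the newer code (the v1 API in the file below has the same shape); the style name and highlight colours are arbitrary examples:

package main

import (
	"log"
	"os"

	"github.com/alecthomas/chroma/v2"
	"github.com/alecthomas/chroma/v2/formatters/html"
	"github.com/alecthomas/chroma/v2/styles"
)

func main() {
	// Start from a named base style, exactly as the chromastyles command does.
	builder := styles.Get("friendly").Builder()

	// Example override mirroring --highlightStyle; the colours are arbitrary.
	builder.Add(chroma.LineHighlight, "#fff000 bg:#000fff")

	style, err := builder.Build()
	if err != nil {
		log.Fatal(err)
	}

	// WithClasses(true) makes the formatter emit CSS classes instead of inline
	// styles, which is what the generated stylesheet is for.
	formatter := html.New(html.WithClasses(true))
	if err := formatter.WriteCSS(os.Stdout, style); err != nil {
		log.Fatal(err)
	}
}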
|
@@ -0,0 +1,74 @@
|
|||
// Copyright 2017-present The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"os"
|
||||
|
||||
"github.com/alecthomas/chroma"
|
||||
"github.com/alecthomas/chroma/formatters/html"
|
||||
"github.com/alecthomas/chroma/styles"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
_ cmder = (*genChromaStyles)(nil)
|
||||
)
|
||||
|
||||
type genChromaStyles struct {
|
||||
style string
|
||||
highlightStyle string
|
||||
linesStyle string
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
// TODO(bep) highlight
|
||||
func createGenChromaStyles() *genChromaStyles {
|
||||
g := &genChromaStyles{
|
||||
baseCmd: newBaseCmd(&cobra.Command{
|
||||
Use: "chromastyles",
|
||||
Short: "Generate CSS stylesheet for the Chroma code highlighter",
|
||||
Long: `Generate CSS stylesheet for the Chroma code highlighter for a given style. This stylesheet is needed if pygmentsUseClasses is enabled in config.
|
||||
|
||||
See https://help.farbox.com/pygments.html for preview of available styles`,
|
||||
}),
|
||||
}
|
||||
|
||||
g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
return g.generate()
|
||||
}
|
||||
|
||||
g.cmd.PersistentFlags().StringVar(&g.style, "style", "friendly", "highlighter style (see https://help.farbox.com/pygments.html)")
|
||||
g.cmd.PersistentFlags().StringVar(&g.highlightStyle, "highlightStyle", "bg:#ffffcc", "style used for highlighting lines (see https://github.com/alecthomas/chroma)")
|
||||
g.cmd.PersistentFlags().StringVar(&g.linesStyle, "linesStyle", "", "style used for line numbers (see https://github.com/alecthomas/chroma)")
|
||||
|
||||
return g
|
||||
}
|
||||
|
||||
func (g *genChromaStyles) generate() error {
|
||||
builder := styles.Get(g.style).Builder()
|
||||
if g.highlightStyle != "" {
|
||||
builder.Add(chroma.LineHighlight, g.highlightStyle)
|
||||
}
|
||||
if g.linesStyle != "" {
|
||||
builder.Add(chroma.LineNumbers, g.linesStyle)
|
||||
}
|
||||
style, err := builder.Build()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
formatter := html.New(html.WithClasses(true))
|
||||
formatter.WriteCSS(os.Stdout, style)
|
||||
return nil
|
||||
}
|
96
commands/gendoc.go
Normal file
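commands/gendoc.go below (like the newer `gen doc` subcommand above) leans on cobra's doc package: GenMarkdownTreeCustom takes a prepender that writes front matter for each generated page and a linkHandler that rewrites cross-links. A sketch with a throwaway command tree and a hypothetical output directory:

package main

import (
	"fmt"
	"log"
	"os"
	"path"
	"path/filepath"
	"strings"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	root := &cobra.Command{Use: "hugo", Short: "hugo builds your site"}
	root.AddCommand(&cobra.Command{Use: "server", Short: "start a local server", Run: func(*cobra.Command, []string) {}})

	outDir := "/tmp/hugodoc" // hypothetical output directory
	if err := os.MkdirAll(outDir, 0o777); err != nil {
		log.Fatal(err)
	}

	// Front matter written at the top of every generated Markdown file.
	prepender := func(filename string) string {
		base := strings.TrimSuffix(filepath.Base(filename), path.Ext(filename))
		return fmt.Sprintf("---\ntitle: %q\nurl: /commands/%s/\n---\n", strings.ReplaceAll(base, "_", " "), strings.ToLower(base))
	}
	// Rewrites links between the generated command pages.
	linkHandler := func(name string) string {
		base := strings.TrimSuffix(name, path.Ext(name))
		return "/commands/" + strings.ToLower(base) + "/"
	}

	if err := doc.GenMarkdownTreeCustom(root, outDir, prepender, linkHandler); err != nil {
		log.Fatal(err)
	}
}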
|
@@ -0,0 +1,96 @@
|
|||
// Copyright 2016 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
var _ cmder = (*genDocCmd)(nil)
|
||||
|
||||
type genDocCmd struct {
|
||||
gendocdir string
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newGenDocCmd() *genDocCmd {
|
||||
const gendocFrontmatterTemplate = `---
|
||||
date: %s
|
||||
title: "%s"
|
||||
slug: %s
|
||||
url: %s
|
||||
---
|
||||
`
|
||||
|
||||
cc := &genDocCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "doc",
|
||||
Short: "Generate Markdown documentation for the Hugo CLI.",
|
||||
Long: `Generate Markdown documentation for the Hugo CLI.
|
||||
|
||||
This command is, mostly, used to create up-to-date documentation
|
||||
of Hugo's command-line interface for http://gohugo.io/.
|
||||
|
||||
It creates one Markdown file per command with front matter suitable
|
||||
for rendering in Hugo.`,
|
||||
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
if !strings.HasSuffix(cc.gendocdir, helpers.FilePathSeparator) {
|
||||
cc.gendocdir += helpers.FilePathSeparator
|
||||
}
|
||||
if found, _ := helpers.Exists(cc.gendocdir, hugofs.Os); !found {
|
||||
jww.FEEDBACK.Println("Directory", cc.gendocdir, "does not exist, creating...")
|
||||
if err := hugofs.Os.MkdirAll(cc.gendocdir, 0777); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
now := time.Now().Format("2006-01-02")
|
||||
prepender := func(filename string) string {
|
||||
name := filepath.Base(filename)
|
||||
base := strings.TrimSuffix(name, path.Ext(name))
|
||||
url := "/commands/" + strings.ToLower(base) + "/"
|
||||
return fmt.Sprintf(gendocFrontmatterTemplate, now, strings.Replace(base, "_", " ", -1), base, url)
|
||||
}
|
||||
|
||||
linkHandler := func(name string) string {
|
||||
base := strings.TrimSuffix(name, path.Ext(name))
|
||||
return "/commands/" + strings.ToLower(base) + "/"
|
||||
}
|
||||
|
||||
jww.FEEDBACK.Println("Generating Hugo command-line documentation in", cc.gendocdir, "...")
|
||||
doc.GenMarkdownTreeCustom(cmd.Root(), cc.gendocdir, prepender, linkHandler)
|
||||
jww.FEEDBACK.Println("Done.")
|
||||
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
cc.cmd.PersistentFlags().StringVar(&cc.gendocdir, "dir", "/tmp/hugodoc/", "the directory to write the doc.")
|
||||
|
||||
// For bash-completion
|
||||
cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
|
||||
|
||||
return cc
|
||||
}
|
74
commands/gendocshelper.go
Normal file
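commands/gendocshelper.go below dumps the docs data as indented JSON; the newer docshelper in gen.go above instead round-trips the data through JSON into a generic map and writes YAML. That round-trip, with a made-up payload standing in for docshelper.GetDocProvider(), looks roughly like this:

package main

import (
	"bytes"
	"encoding/json"
	"log"
	"os"

	"gopkg.in/yaml.v2"
)

func main() {
	// Made-up payload standing in for docshelper.GetDocProvider().
	payload := map[string]any{
		"config": map[string]any{"baseURL": "", "title": ""},
	}

	// Encode to JSON first so any custom JSON marshalling (key casing etc.) is applied.
	var buf bytes.Buffer
	if err := json.NewEncoder(&buf).Encode(payload); err != nil {
		log.Fatal(err)
	}

	// Decode into a plain map, then re-encode that map as YAML.
	var m map[string]any
	if err := json.Unmarshal(buf.Bytes(), &m); err != nil {
		log.Fatal(err)
	}
	if err := yaml.NewEncoder(os.Stdout).Encode(m); err != nil {
		log.Fatal(err)
	}
}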
|
@@ -0,0 +1,74 @@
|
|||
// Copyright 2017-present The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"encoding/json"
|
||||
"fmt"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gohugoio/hugo/docshelper"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var (
|
||||
_ cmder = (*genDocsHelper)(nil)
|
||||
)
|
||||
|
||||
type genDocsHelper struct {
|
||||
target string
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func createGenDocsHelper() *genDocsHelper {
|
||||
g := &genDocsHelper{
|
||||
baseCmd: newBaseCmd(&cobra.Command{
|
||||
Use: "docshelper",
|
||||
Short: "Generate some data files for the Hugo docs.",
|
||||
Hidden: true,
|
||||
}),
|
||||
}
|
||||
|
||||
g.cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
return g.generate()
|
||||
}
|
||||
|
||||
g.cmd.PersistentFlags().StringVarP(&g.target, "dir", "", "docs/data", "data dir")
|
||||
|
||||
return g
|
||||
}
|
||||
|
||||
func (g *genDocsHelper) generate() error {
|
||||
fmt.Println("Generate docs data to", g.target)
|
||||
|
||||
targetFile := filepath.Join(g.target, "docs.json")
|
||||
|
||||
f, err := os.Create(targetFile)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
defer f.Close()
|
||||
|
||||
enc := json.NewEncoder(f)
|
||||
enc.SetIndent("", " ")
|
||||
|
||||
if err := enc.Encode(docshelper.DocProviders); err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
fmt.Println("Done!")
|
||||
return nil
|
||||
|
||||
}
|
77
commands/genman.go
Normal file
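commands/genman.go below is a thin wrapper around cobra's doc.GenManTree plus a GenManHeader. A standalone sketch; the version string and output directory are placeholders:

package main

import (
	"log"
	"os"

	"github.com/spf13/cobra"
	"github.com/spf13/cobra/doc"
)

func main() {
	root := &cobra.Command{Use: "hugo", Short: "hugo builds your site"}
	root.DisableAutoGenTag = true // keep the generated pages stable between runs

	header := &doc.GenManHeader{
		Section: "1",
		Manual:  "Hugo Manual",
		Source:  "Hugo (version string is a placeholder)",
	}

	if err := os.MkdirAll("man", 0o777); err != nil {
		log.Fatal(err)
	}
	if err := doc.GenManTree(root, header, "man"); err != nil {
		log.Fatal(err)
	}
}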
|
@@ -0,0 +1,77 @@
|
|||
// Copyright 2016 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"github.com/gohugoio/hugo/common/hugo"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/spf13/cobra"
|
||||
"github.com/spf13/cobra/doc"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
var _ cmder = (*genManCmd)(nil)
|
||||
|
||||
type genManCmd struct {
|
||||
genmandir string
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newGenManCmd() *genManCmd {
|
||||
cc := &genManCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "man",
|
||||
Short: "Generate man pages for the Hugo CLI",
|
||||
Long: `This command automatically generates up-to-date man pages of Hugo's
|
||||
command-line interface. By default, it creates the man page files
|
||||
in the "man" directory under the current directory.`,
|
||||
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
header := &doc.GenManHeader{
|
||||
Section: "1",
|
||||
Manual: "Hugo Manual",
|
||||
Source: fmt.Sprintf("Hugo %s", hugo.CurrentVersion),
|
||||
}
|
||||
if !strings.HasSuffix(cc.genmandir, helpers.FilePathSeparator) {
|
||||
cc.genmandir += helpers.FilePathSeparator
|
||||
}
|
||||
if found, _ := helpers.Exists(cc.genmandir, hugofs.Os); !found {
|
||||
jww.FEEDBACK.Println("Directory", cc.genmandir, "does not exist, creating...")
|
||||
if err := hugofs.Os.MkdirAll(cc.genmandir, 0777); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
cmd.Root().DisableAutoGenTag = true
|
||||
|
||||
jww.FEEDBACK.Println("Generating Hugo man pages in", cc.genmandir, "...")
|
||||
doc.GenManTree(cmd.Root(), header, cc.genmandir)
|
||||
|
||||
jww.FEEDBACK.Println("Done.")
|
||||
|
||||
return nil
|
||||
},
|
||||
})
|
||||
|
||||
cc.cmd.PersistentFlags().StringVar(&cc.genmandir, "dir", "man/", "the directory to write the man pages.")
|
||||
|
||||
// For bash-completion
|
||||
cc.cmd.PersistentFlags().SetAnnotation("dir", cobra.BashCompSubdirsInDir, []string{})
|
||||
|
||||
return cc
|
||||
}
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -11,19 +11,16 @@
|
|||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
// Package commands defines and implements command-line commands and flags
|
||||
// used by Hugo. Commands and flags are implemented using Cobra.
|
||||
package commands
|
||||
|
||||
import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"regexp"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/spf13/pflag"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const (
|
||||
|
@@ -33,89 +30,50 @@ const (
|
|||
showCursor = ansiEsc + "[?25h"
|
||||
)
|
||||
|
||||
func newUserError(a ...any) *simplecobra.CommandError {
|
||||
return &simplecobra.CommandError{Err: errors.New(fmt.Sprint(a...))}
|
||||
type flagsToConfigHandler interface {
|
||||
flagsToConfig(cfg config.Provider)
|
||||
}
|
||||
|
||||
func setValueFromFlag(flags *pflag.FlagSet, key string, cfg config.Provider, targetKey string, force bool) {
|
||||
key = strings.TrimSpace(key)
|
||||
if (force && flags.Lookup(key) != nil) || flags.Changed(key) {
|
||||
f := flags.Lookup(key)
|
||||
configKey := key
|
||||
if targetKey != "" {
|
||||
configKey = targetKey
|
||||
}
|
||||
// Gotta love this API.
|
||||
switch f.Value.Type() {
|
||||
case "bool":
|
||||
bv, _ := flags.GetBool(key)
|
||||
cfg.Set(configKey, bv)
|
||||
case "string":
|
||||
cfg.Set(configKey, f.Value.String())
|
||||
case "stringSlice":
|
||||
bv, _ := flags.GetStringSlice(key)
|
||||
cfg.Set(configKey, bv)
|
||||
case "int":
|
||||
iv, _ := flags.GetInt(key)
|
||||
cfg.Set(configKey, iv)
|
||||
default:
|
||||
panic(fmt.Sprintf("update switch with %s", f.Value.Type()))
|
||||
type cmder interface {
|
||||
flagsToConfigHandler
|
||||
getCommand() *cobra.Command
|
||||
}
|
||||
|
||||
}
|
||||
// commandError is an error used to signal different error situations in command handling.
|
||||
type commandError struct {
|
||||
s string
|
||||
userError bool
|
||||
}
|
||||
|
||||
func flagsToCfg(cd *simplecobra.Commandeer, cfg config.Provider) config.Provider {
|
||||
return flagsToCfgWithAdditionalConfigBase(cd, cfg, "")
|
||||
func (c commandError) Error() string {
|
||||
return c.s
|
||||
}
|
||||
|
||||
func flagsToCfgWithAdditionalConfigBase(cd *simplecobra.Commandeer, cfg config.Provider, additionalConfigBase string) config.Provider {
|
||||
if cfg == nil {
|
||||
cfg = config.New()
|
||||
func (c commandError) isUserError() bool {
|
||||
return c.userError
|
||||
}
|
||||
|
||||
// Flags with a different name in the config.
|
||||
keyMap := map[string]string{
|
||||
"minify": "minifyOutput",
|
||||
"destination": "publishDir",
|
||||
"editor": "newContentEditor",
|
||||
func newUserError(a ...interface{}) commandError {
|
||||
return commandError{s: fmt.Sprintln(a...), userError: true}
|
||||
}
|
||||
|
||||
// Flags that we for some reason don't want to expose in the site config.
|
||||
internalKeySet := map[string]bool{
|
||||
"quiet": true,
|
||||
"verbose": true,
|
||||
"watch": true,
|
||||
"liveReloadPort": true,
|
||||
"renderToMemory": true,
|
||||
"clock": true,
|
||||
func newSystemError(a ...interface{}) commandError {
|
||||
return commandError{s: fmt.Sprintln(a...), userError: false}
|
||||
}
|
||||
|
||||
cmd := cd.CobraCommand
|
||||
flags := cmd.Flags()
|
||||
|
||||
flags.VisitAll(func(f *pflag.Flag) {
|
||||
if f.Changed {
|
||||
targetKey := f.Name
|
||||
if internalKeySet[targetKey] {
|
||||
targetKey = "internal." + targetKey
|
||||
} else if mapped, ok := keyMap[targetKey]; ok {
|
||||
targetKey = mapped
|
||||
}
|
||||
setValueFromFlag(flags, f.Name, cfg, targetKey, false)
|
||||
if additionalConfigBase != "" {
|
||||
setValueFromFlag(flags, f.Name, cfg, additionalConfigBase+"."+targetKey, true)
|
||||
}
|
||||
}
|
||||
})
|
||||
|
||||
return cfg
|
||||
func newSystemErrorF(format string, a ...interface{}) commandError {
|
||||
return commandError{s: fmt.Sprintf(format, a...), userError: false}
|
||||
}
|
||||
|
||||
func mkdir(x ...string) {
|
||||
p := filepath.Join(x...)
|
||||
err := os.MkdirAll(p, 0o777) // before umask
|
||||
if err != nil {
|
||||
log.Fatal(err)
|
||||
// Catch some of the obvious user errors from Cobra.
|
||||
// We don't want to show the usage message for every error.
|
||||
// The below may be to generic. Time will show.
|
||||
var userErrorRegexp = regexp.MustCompile("argument|flag|shorthand")
|
||||
|
||||
func isUserError(err error) bool {
|
||||
if cErr, ok := err.(commandError); ok && cErr.isUserError() {
|
||||
return true
|
||||
}
|
||||
|
||||
return userErrorRegexp.MatchString(err.Error())
|
||||
}
|
||||
|
|
1178
commands/hugo.go
Normal file
File diff suppressed because it is too large
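Stepping back to the flag handling in commands.go above: flagsToCfg/setValueFromFlag rely on pflag reporting which flags were explicitly changed and what their declared types are. The same pattern reduced to a standalone sketch, with a plain map in place of Hugo's config.Provider:

package main

import (
	"fmt"
	"log"

	"github.com/spf13/pflag"
)

func main() {
	flags := pflag.NewFlagSet("hugo", pflag.ContinueOnError)
	flags.Bool("minify", false, "minify output")
	flags.String("destination", "", "publish dir")
	flags.Int("port", 1313, "server port")

	// Simulate a command line: only --minify and --port are explicitly set.
	if err := flags.Parse([]string{"--minify", "--port=8080"}); err != nil {
		log.Fatal(err)
	}

	cfg := map[string]any{}
	flags.VisitAll(func(f *pflag.Flag) {
		if !f.Changed { // ignore flags left at their defaults
			return
		}
		// Dispatch on the flag's declared type, as setValueFromFlag does.
		switch f.Value.Type() {
		case "bool":
			v, _ := flags.GetBool(f.Name)
			cfg[f.Name] = v
		case "int":
			v, _ := flags.GetInt(f.Name)
			cfg[f.Name] = v
		default:
			cfg[f.Name] = f.Value.String()
		}
	})

	fmt.Println(cfg) // map[minify:true port:8080]
}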
52
commands/hugo_test.go
Normal file
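commands/hugo_test.go below drives the whole CLI through cobra by setting argv and calling ExecuteC. The same testing pattern in isolation, with a throwaway command instead of Hugo's root command:

package main

import (
	"fmt"
	"log"

	"github.com/spf13/cobra"
)

func main() {
	var source string
	root := &cobra.Command{
		Use:  "hugo",
		RunE: func(cmd *cobra.Command, args []string) error { return nil },
	}
	root.PersistentFlags().StringVarP(&source, "source", "s", "", "source directory")

	// Equivalent to running `hugo -s /tmp/site`; the path is made up.
	root.SetArgs([]string{"-s", "/tmp/site"})
	if _, err := root.ExecuteC(); err != nil {
		log.Fatal(err)
	}
	fmt.Println("source =", source) // source = /tmp/site
}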
|
@@ -0,0 +1,52 @@
|
|||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"os"
|
||||
"testing"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
)
|
||||
|
||||
// Issue #5662
|
||||
func TestHugoWithContentDirOverride(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
|
||||
hugoCmd := newCommandsBuilder().addAll().build()
|
||||
cmd := hugoCmd.getCommand()
|
||||
|
||||
contentDir := "contentOverride"
|
||||
|
||||
cfgStr := `
|
||||
|
||||
baseURL = "https://example.org"
|
||||
title = "Hugo Commands"
|
||||
|
||||
contentDir = "thisdoesnotexist"
|
||||
|
||||
`
|
||||
dir, err := createSimpleTestSite(t, testSiteConfig{configTOML: cfgStr, contentDir: contentDir})
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
defer func() {
|
||||
os.RemoveAll(dir)
|
||||
}()
|
||||
|
||||
cmd.SetArgs([]string{"-s=" + dir, "-c=" + contentDir})
|
||||
|
||||
_, err = cmd.ExecuteC()
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
}
|
|
@@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -13,21 +13,15 @@
|
|||
|
||||
package commands
|
||||
|
||||
import (
|
||||
// For time zone lookups on Windows without Go installed.
|
||||
// See #8892
|
||||
_ "time/tzdata"
|
||||
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
import "github.com/spf13/cobra"
|
||||
|
||||
func init() {
|
||||
// This message to show to Windows users if Hugo is opened from explorer.exe
|
||||
cobra.MousetrapHelpText = `
|
||||
|
||||
Hugo is a command-line tool for generating static websites.
|
||||
Hugo is a command-line tool for generating static website.
|
||||
|
||||
You need to open PowerShell and run Hugo from there.
|
||||
You need to open cmd.exe and run Hugo from there.
|
||||
|
||||
Visit https://gohugo.io/ for more information.`
|
||||
}
|
||||
|
|
File diff suppressed because it is too large
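The small Windows-only file above does one thing: it sets cobra.MousetrapHelpText, the message cobra shows when the binary is double-clicked from Explorer instead of being run from a shell. A minimal sketch (the message wording here is invented):

package main

import "github.com/spf13/cobra"

func init() {
	// Shown by cobra instead of running the command when the binary is
	// started from explorer.exe on Windows; the wording is invented.
	cobra.MousetrapHelpText = "Hugo is a command-line tool. Please run it from a terminal."
}

func main() {
	_ = (&cobra.Command{Use: "hugo"}).Execute()
}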
|
@@ -1,618 +0,0 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io"
|
||||
"log"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/common/htime"
|
||||
"github.com/gohugoio/hugo/common/hugio"
|
||||
"github.com/gohugoio/hugo/common/maps"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||
"github.com/gohugoio/hugo/parser/pageparser"
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
func newImportCommand() *importCommand {
|
||||
var c *importCommand
|
||||
c = &importCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "jekyll",
|
||||
short: "hugo import from Jekyll",
|
||||
long: `hugo import from Jekyll.
|
||||
|
||||
Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
if len(args) < 2 {
|
||||
return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
|
||||
}
|
||||
return c.importFromJekyll(args)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.Flags().BoolVar(&c.force, "force", false, "allow import into non-empty target directory")
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
type importCommand struct {
|
||||
r *rootCommand
|
||||
|
||||
force bool
|
||||
|
||||
commands []simplecobra.Commander
|
||||
}
|
||||
|
||||
func (c *importCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
func (c *importCommand) Name() string {
|
||||
return "import"
|
||||
}
|
||||
|
||||
func (c *importCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Import a site from another system"
|
||||
cmd.Long = `Import a site from another system.
|
||||
|
||||
Import requires a subcommand, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`."
|
||||
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.r = cd.Root.Command.(*rootCommand)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *importCommand) createConfigFromJekyll(fs afero.Fs, inpath string, kind metadecoders.Format, jekyllConfig map[string]any) (err error) {
|
||||
title := "My New Hugo Site"
|
||||
baseURL := "http://example.org/"
|
||||
|
||||
for key, value := range jekyllConfig {
|
||||
lowerKey := strings.ToLower(key)
|
||||
|
||||
switch lowerKey {
|
||||
case "title":
|
||||
if str, ok := value.(string); ok {
|
||||
title = str
|
||||
}
|
||||
|
||||
case "url":
|
||||
if str, ok := value.(string); ok {
|
||||
baseURL = str
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
in := map[string]any{
|
||||
"baseURL": baseURL,
|
||||
"title": title,
|
||||
"languageCode": "en-us",
|
||||
"disablePathToLower": true,
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
err = parser.InterfaceToConfig(in, kind, &buf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return helpers.WriteToDisk(filepath.Join(inpath, "hugo."+string(kind)), &buf, fs)
|
||||
}
|
||||
|
||||
func (c *importCommand) getJekyllDirInfo(fs afero.Fs, jekyllRoot string) (map[string]bool, bool) {
|
||||
postDirs := make(map[string]bool)
|
||||
hasAnyPost := false
|
||||
if entries, err := os.ReadDir(jekyllRoot); err == nil {
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
subDir := filepath.Join(jekyllRoot, entry.Name())
|
||||
if isPostDir, hasAnyPostInDir := c.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||
postDirs[entry.Name()] = hasAnyPostInDir
|
||||
if hasAnyPostInDir {
|
||||
hasAnyPost = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return postDirs, hasAnyPost
|
||||
}
|
||||
|
||||
func (c *importCommand) createSiteFromJekyll(jekyllRoot, targetDir string, jekyllPostDirs map[string]bool) error {
|
||||
fs := &afero.OsFs{}
|
||||
if exists, _ := helpers.Exists(targetDir, fs); exists {
|
||||
if isDir, _ := helpers.IsDir(targetDir, fs); !isDir {
|
||||
return errors.New("target path \"" + targetDir + "\" exists but is not a directory")
|
||||
}
|
||||
|
||||
isEmpty, _ := helpers.IsEmpty(targetDir, fs)
|
||||
|
||||
if !isEmpty && !c.force {
|
||||
return errors.New("target path \"" + targetDir + "\" exists and is not empty")
|
||||
}
|
||||
}
|
||||
|
||||
jekyllConfig := c.loadJekyllConfig(fs, jekyllRoot)
|
||||
|
||||
mkdir(targetDir, "layouts")
|
||||
mkdir(targetDir, "content")
|
||||
mkdir(targetDir, "archetypes")
|
||||
mkdir(targetDir, "static")
|
||||
mkdir(targetDir, "data")
|
||||
mkdir(targetDir, "themes")
|
||||
|
||||
c.createConfigFromJekyll(fs, targetDir, "yaml", jekyllConfig)
|
||||
|
||||
c.copyJekyllFilesAndFolders(jekyllRoot, filepath.Join(targetDir, "static"), jekyllPostDirs)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) convertJekyllContent(m any, content string) (string, error) {
|
||||
metadata, _ := maps.ToStringMapE(m)
|
||||
|
||||
lines := strings.Split(content, "\n")
|
||||
var resultLines []string
|
||||
for _, line := range lines {
|
||||
resultLines = append(resultLines, strings.Trim(line, "\r\n"))
|
||||
}
|
||||
|
||||
content = strings.Join(resultLines, "\n")
|
||||
|
||||
excerptSep := "<!--more-->"
|
||||
if value, ok := metadata["excerpt_separator"]; ok {
|
||||
if str, strOk := value.(string); strOk {
|
||||
content = strings.Replace(content, strings.TrimSpace(str), excerptSep, -1)
|
||||
}
|
||||
}
|
||||
|
||||
replaceList := []struct {
|
||||
re *regexp.Regexp
|
||||
replace string
|
||||
}{
|
||||
{regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
|
||||
{regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
|
||||
{regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
|
||||
}
|
||||
|
||||
for _, replace := range replaceList {
|
||||
content = replace.re.ReplaceAllString(content, replace.replace)
|
||||
}
|
||||
|
||||
replaceListFunc := []struct {
|
||||
re *regexp.Regexp
|
||||
replace func(string) string
|
||||
}{
|
||||
// Octopress image tag: http://octopress.org/docs/plugins/image-tag/
|
||||
{regexp.MustCompile(`{%\s+img\s*(.*?)\s*%}`), c.replaceImageTag},
|
||||
{regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`), c.replaceHighlightTag},
|
||||
}
|
||||
|
||||
for _, replace := range replaceListFunc {
|
||||
content = replace.re.ReplaceAllStringFunc(content, replace.replace)
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
if len(metadata) != 0 {
|
||||
err := parser.InterfaceToFrontMatter(m, metadecoders.YAML, &buf)
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
}
|
||||
buf.WriteString(content)
|
||||
|
||||
return buf.String(), nil
|
||||
}
|
||||
|
||||
func (c *importCommand) convertJekyllMetaData(m any, postName string, postDate time.Time, draft bool) (any, error) {
|
||||
metadata, err := maps.ToStringMapE(m)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
if draft {
|
||||
metadata["draft"] = true
|
||||
}
|
||||
|
||||
for key, value := range metadata {
|
||||
lowerKey := strings.ToLower(key)
|
||||
|
||||
switch lowerKey {
|
||||
case "layout":
|
||||
delete(metadata, key)
|
||||
case "permalink":
|
||||
if str, ok := value.(string); ok {
|
||||
metadata["url"] = str
|
||||
}
|
||||
delete(metadata, key)
|
||||
case "category":
|
||||
if str, ok := value.(string); ok {
|
||||
metadata["categories"] = []string{str}
|
||||
}
|
||||
delete(metadata, key)
|
||||
case "excerpt_separator":
|
||||
if key != lowerKey {
|
||||
delete(metadata, key)
|
||||
metadata[lowerKey] = value
|
||||
}
|
||||
case "date":
|
||||
if str, ok := value.(string); ok {
|
||||
re := regexp.MustCompile(`(\d+):(\d+):(\d+)`)
|
||||
r := re.FindAllStringSubmatch(str, -1)
|
||||
if len(r) > 0 {
|
||||
hour, _ := strconv.Atoi(r[0][1])
|
||||
minute, _ := strconv.Atoi(r[0][2])
|
||||
second, _ := strconv.Atoi(r[0][3])
|
||||
postDate = time.Date(postDate.Year(), postDate.Month(), postDate.Day(), hour, minute, second, 0, time.UTC)
|
||||
}
|
||||
}
|
||||
delete(metadata, key)
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
metadata["date"] = postDate.Format(time.RFC3339)
|
||||
|
||||
return metadata, nil
|
||||
}
|
||||
|
||||
func (c *importCommand) convertJekyllPost(path, relPath, targetDir string, draft bool) error {
|
||||
log.Println("Converting", path)
|
||||
|
||||
filename := filepath.Base(path)
|
||||
postDate, postName, err := c.parseJekyllFilename(filename)
|
||||
if err != nil {
|
||||
c.r.Printf("Failed to parse filename '%s': %s. Skipping.", filename, err)
|
||||
return nil
|
||||
}
|
||||
|
||||
log.Println(filename, postDate, postName)
|
||||
|
||||
targetFile := filepath.Join(targetDir, relPath)
|
||||
targetParentDir := filepath.Dir(targetFile)
|
||||
os.MkdirAll(targetParentDir, 0o777)
|
||||
|
||||
contentBytes, err := os.ReadFile(path)
|
||||
if err != nil {
|
||||
c.r.logger.Errorln("Read file error:", path)
|
||||
return err
|
||||
}
|
||||
pf, err := pageparser.ParseFrontMatterAndContent(bytes.NewReader(contentBytes))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to parse file %q: %s", filename, err)
|
||||
}
|
||||
newmetadata, err := c.convertJekyllMetaData(pf.FrontMatter, postName, postDate, draft)
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert metadata for file %q: %s", filename, err)
|
||||
}
|
||||
|
||||
content, err := c.convertJekyllContent(newmetadata, string(pf.Content))
|
||||
if err != nil {
|
||||
return fmt.Errorf("failed to convert content for file %q: %s", filename, err)
|
||||
}
|
||||
|
||||
fs := hugofs.Os
|
||||
if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
|
||||
return fmt.Errorf("failed to save file %q: %s", filename, err)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
|
||||
fs := hugofs.Os
|
||||
|
||||
fi, err := fs.Stat(jekyllRoot)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if !fi.IsDir() {
|
||||
return errors.New(jekyllRoot + " is not a directory")
|
||||
}
|
||||
err = os.MkdirAll(dest, fi.Mode())
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
entries, err := os.ReadDir(jekyllRoot)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
for _, entry := range entries {
|
||||
sfp := filepath.Join(jekyllRoot, entry.Name())
|
||||
dfp := filepath.Join(dest, entry.Name())
|
||||
if entry.IsDir() {
|
||||
if entry.Name()[0] != '_' && entry.Name()[0] != '.' {
|
||||
if _, ok := jekyllPostDirs[entry.Name()]; !ok {
|
||||
err = hugio.CopyDir(fs, sfp, dfp, nil)
|
||||
if err != nil {
|
||||
c.r.logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
} else {
|
||||
lowerEntryName := strings.ToLower(entry.Name())
|
||||
exceptSuffix := []string{
|
||||
".md", ".markdown", ".html", ".htm",
|
||||
".xml", ".textile", "rakefile", "gemfile", ".lock",
|
||||
}
|
||||
isExcept := false
|
||||
for _, suffix := range exceptSuffix {
|
||||
if strings.HasSuffix(lowerEntryName, suffix) {
|
||||
isExcept = true
|
||||
break
|
||||
}
|
||||
}
|
||||
|
||||
if !isExcept && entry.Name()[0] != '.' && entry.Name()[0] != '_' {
|
||||
err = hugio.CopyFile(fs, sfp, dfp)
|
||||
if err != nil {
|
||||
c.r.logger.Errorln(err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) importFromJekyll(args []string) error {
|
||||
jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
|
||||
if err != nil {
|
||||
return newUserError("path error:", args[0])
|
||||
}
|
||||
|
||||
targetDir, err := filepath.Abs(filepath.Clean(args[1]))
|
||||
if err != nil {
|
||||
return newUserError("path error:", args[1])
|
||||
}
|
||||
|
||||
c.r.Println("Import Jekyll from:", jekyllRoot, "to:", targetDir)
|
||||
|
||||
if strings.HasPrefix(filepath.Dir(targetDir), jekyllRoot) {
|
||||
return newUserError("abort: target path should not be inside the Jekyll root")
|
||||
}
|
||||
|
||||
fs := afero.NewOsFs()
|
||||
jekyllPostDirs, hasAnyPost := c.getJekyllDirInfo(fs, jekyllRoot)
|
||||
if !hasAnyPost {
|
||||
return errors.New("abort: jekyll root contains neither posts nor drafts")
|
||||
}
|
||||
|
||||
err = c.createSiteFromJekyll(jekyllRoot, targetDir, jekyllPostDirs)
|
||||
if err != nil {
|
||||
return newUserError(err)
|
||||
}
|
||||
|
||||
c.r.Println("Importing...")
|
||||
|
||||
fileCount := 0
|
||||
callback := func(path string, fi hugofs.FileMetaInfo) error {
|
||||
if fi.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(jekyllRoot, path)
|
||||
if err != nil {
|
||||
return newUserError("get rel path error:", path)
|
||||
}
|
||||
|
||||
relPath = filepath.ToSlash(relPath)
|
||||
draft := false
|
||||
|
||||
switch {
|
||||
case strings.Contains(relPath, "_posts/"):
|
||||
relPath = filepath.Join("content/post", strings.Replace(relPath, "_posts/", "", -1))
|
||||
case strings.Contains(relPath, "_drafts/"):
|
||||
relPath = filepath.Join("content/draft", strings.Replace(relPath, "_drafts/", "", -1))
|
||||
draft = true
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
fileCount++
|
||||
return c.convertJekyllPost(path, relPath, targetDir, draft)
|
||||
}
|
||||
|
||||
for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
|
||||
if hasAnyPostInDir {
|
||||
if err = helpers.Walk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
c.r.Println("Congratulations!", fileCount, "post(s) imported!")
|
||||
c.r.Println("Now, start Hugo by yourself:\n")
|
||||
c.r.Println("cd " + args[1])
|
||||
c.r.Println("git init")
|
||||
c.r.Println("git submodule add https://github.com/theNewDynamic/gohugo-theme-ananke themes/ananke")
|
||||
c.r.Println("echo \"theme = 'ananke'\" > hugo.toml")
|
||||
c.r.Println("hugo server")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *importCommand) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]any {
|
||||
path := filepath.Join(jekyllRoot, "_config.yml")
|
||||
|
||||
exists, err := helpers.Exists(path, fs)
|
||||
|
||||
if err != nil || !exists {
|
||||
c.r.Println("_config.yaml not found: Is the specified Jekyll root correct?")
|
||||
return nil
|
||||
}
|
||||
|
||||
f, err := fs.Open(path)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
defer f.Close()
|
||||
|
||||
b, err := io.ReadAll(f)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
m, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)
|
||||
if err != nil {
|
||||
return nil
|
||||
}
|
||||
|
||||
return m
|
||||
}
|
||||
|
||||
func (c *importCommand) parseJekyllFilename(filename string) (time.Time, string, error) {
|
||||
re := regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)
|
||||
r := re.FindAllStringSubmatch(filename, -1)
|
||||
if len(r) == 0 {
|
||||
return htime.Now(), "", errors.New("filename not match")
|
||||
}
|
||||
|
||||
postDate, err := time.Parse("2006-1-2", r[0][1])
|
||||
if err != nil {
|
||||
return htime.Now(), "", err
|
||||
}
|
||||
|
||||
postName := r[0][2]
|
||||
|
||||
return postDate, postName, nil
|
||||
}
|
||||
|
||||
func (c *importCommand) replaceHighlightTag(match string) string {
|
||||
r := regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`)
|
||||
parts := r.FindStringSubmatch(match)
|
||||
lastQuote := rune(0)
|
||||
f := func(c rune) bool {
|
||||
switch {
|
||||
case c == lastQuote:
|
||||
lastQuote = rune(0)
|
||||
return false
|
||||
case lastQuote != rune(0):
|
||||
return false
|
||||
case unicode.In(c, unicode.Quotation_Mark):
|
||||
lastQuote = c
|
||||
return false
|
||||
default:
|
||||
return unicode.IsSpace(c)
|
||||
}
|
||||
}
|
||||
// splitting string by space but considering quoted section
|
||||
items := strings.FieldsFunc(parts[1], f)
|
||||
|
||||
result := bytes.NewBufferString("{{< highlight ")
|
||||
result.WriteString(items[0]) // language
|
||||
options := items[1:]
|
||||
for i, opt := range options {
|
||||
opt = strings.Replace(opt, "\"", "", -1)
|
||||
if opt == "linenos" {
|
||||
opt = "linenos=table"
|
||||
}
|
||||
if i == 0 {
|
||||
opt = " \"" + opt
|
||||
}
|
||||
if i < len(options)-1 {
|
||||
opt += ","
|
||||
} else if i == len(options)-1 {
|
||||
opt += "\""
|
||||
}
|
||||
result.WriteString(opt)
|
||||
}
|
||||
|
||||
result.WriteString(" >}}")
|
||||
return result.String()
|
||||
}
|
||||
|
||||
func (c *importCommand) replaceImageTag(match string) string {
|
||||
r := regexp.MustCompile(`{%\s+img\s*(\p{L}*)\s+([\S]*/[\S]+)\s+(\d*)\s*(\d*)\s*(.*?)\s*%}`)
|
||||
result := bytes.NewBufferString("{{< figure ")
|
||||
parts := r.FindStringSubmatch(match)
|
||||
// Index 0 is the entire string, ignore
|
||||
c.replaceOptionalPart(result, "class", parts[1])
|
||||
c.replaceOptionalPart(result, "src", parts[2])
|
||||
c.replaceOptionalPart(result, "width", parts[3])
|
||||
c.replaceOptionalPart(result, "height", parts[4])
|
||||
// title + alt
|
||||
part := parts[5]
|
||||
if len(part) > 0 {
|
||||
splits := strings.Split(part, "'")
|
||||
lenSplits := len(splits)
|
||||
if lenSplits == 1 {
|
||||
c.replaceOptionalPart(result, "title", splits[0])
|
||||
} else if lenSplits == 3 {
|
||||
c.replaceOptionalPart(result, "title", splits[1])
|
||||
} else if lenSplits == 5 {
|
||||
c.replaceOptionalPart(result, "title", splits[1])
|
||||
c.replaceOptionalPart(result, "alt", splits[3])
|
||||
}
|
||||
}
|
||||
result.WriteString(">}}")
|
||||
return result.String()
|
||||
}
|
||||
|
||||
func (c *importCommand) replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
|
||||
if len(part) > 0 {
|
||||
buffer.WriteString(partName + "=\"" + part + "\" ")
|
||||
}
|
||||
}
|
||||
|
||||
func (c *importCommand) retrieveJekyllPostDir(fs afero.Fs, dir string) (bool, bool) {
|
||||
if strings.HasSuffix(dir, "_posts") || strings.HasSuffix(dir, "_drafts") {
|
||||
isEmpty, _ := helpers.IsEmpty(dir, fs)
|
||||
return true, !isEmpty
|
||||
}
|
||||
|
||||
if entries, err := os.ReadDir(dir); err == nil {
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
subDir := filepath.Join(dir, entry.Name())
|
||||
if isPostDir, hasAnyPost := c.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||
return isPostDir, hasAnyPost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false, true
|
||||
}
|
609
commands/import_jekyll.go
Normal file
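commands/import_jekyll.go below (and its newer counterpart above) starts by splitting Jekyll post filenames of the form YYYY-MM-DD-title.ext into a date and a slug. That piece in isolation, using the same regexp and time layout as the source:

package main

import (
	"errors"
	"fmt"
	"regexp"
	"time"
)

// Same pattern and time layout as parseJekyllFilename in the importer.
var jekyllNameRe = regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)

func parseJekyllFilename(filename string) (time.Time, string, error) {
	m := jekyllNameRe.FindStringSubmatch(filename)
	if m == nil {
		return time.Time{}, "", errors.New("filename does not match the Jekyll post pattern")
	}
	postDate, err := time.Parse("2006-1-2", m[1])
	if err != nil {
		return time.Time{}, "", err
	}
	return postDate, m[2], nil
}

func main() {
	d, slug, err := parseJekyllFilename("2019-02-01-hello-world.md")
	fmt.Println(d.Format("2006-01-02"), slug, err) // 2019-02-01 hello-world <nil>
}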
|
@@ -0,0 +1,609 @@
|
|||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"fmt"
|
||||
"io/ioutil"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
"github.com/gohugoio/hugo/common/hugio"
|
||||
|
||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||
|
||||
"github.com/gohugoio/hugo/common/maps"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
var _ cmder = (*importCmd)(nil)
|
||||
|
||||
type importCmd struct {
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newImportCmd() *importCmd {
|
||||
cc := &importCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "import",
|
||||
Short: "Import your site from others.",
|
||||
Long: `Import your site from other web site generators like Jekyll.
|
||||
|
||||
Import requires a subcommand, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
||||
RunE: nil,
|
||||
})
|
||||
|
||||
importJekyllCmd := &cobra.Command{
|
||||
Use: "jekyll",
|
||||
Short: "hugo import from Jekyll",
|
||||
Long: `hugo import from Jekyll.
|
||||
|
||||
Import from Jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.",
|
||||
RunE: cc.importFromJekyll,
|
||||
}
|
||||
|
||||
importJekyllCmd.Flags().Bool("force", false, "allow import into non-empty target directory")
|
||||
|
||||
cc.cmd.AddCommand(importJekyllCmd)
|
||||
|
||||
return cc
|
||||
|
||||
}
|
||||
|
||||
func (i *importCmd) importFromJekyll(cmd *cobra.Command, args []string) error {
|
||||
|
||||
if len(args) < 2 {
|
||||
return newUserError(`import from jekyll requires two paths, e.g. ` + "`hugo import jekyll jekyll_root_path target_path`.")
|
||||
}
|
||||
|
||||
jekyllRoot, err := filepath.Abs(filepath.Clean(args[0]))
|
||||
if err != nil {
|
||||
return newUserError("path error:", args[0])
|
||||
}
|
||||
|
||||
targetDir, err := filepath.Abs(filepath.Clean(args[1]))
|
||||
if err != nil {
|
||||
return newUserError("path error:", args[1])
|
||||
}
|
||||
|
||||
jww.INFO.Println("Import Jekyll from:", jekyllRoot, "to:", targetDir)
|
||||
|
||||
if strings.HasPrefix(filepath.Dir(targetDir), jekyllRoot) {
|
||||
return newUserError("abort: target path should not be inside the Jekyll root")
|
||||
}
|
||||
|
||||
forceImport, _ := cmd.Flags().GetBool("force")
|
||||
|
||||
fs := afero.NewOsFs()
|
||||
jekyllPostDirs, hasAnyPost := i.getJekyllDirInfo(fs, jekyllRoot)
|
||||
if !hasAnyPost {
|
||||
return errors.New("abort: jekyll root contains neither posts nor drafts")
|
||||
}
|
||||
|
||||
err = i.createSiteFromJekyll(jekyllRoot, targetDir, jekyllPostDirs, forceImport)
|
||||
|
||||
if err != nil {
|
||||
return newUserError(err)
|
||||
}
|
||||
|
||||
jww.FEEDBACK.Println("Importing...")
|
||||
|
||||
fileCount := 0
|
||||
callback := func(path string, fi hugofs.FileMetaInfo, err error) error {
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if fi.IsDir() {
|
||||
return nil
|
||||
}
|
||||
|
||||
relPath, err := filepath.Rel(jekyllRoot, path)
|
||||
if err != nil {
|
||||
return newUserError("get rel path error:", path)
|
||||
}
|
||||
|
||||
relPath = filepath.ToSlash(relPath)
|
||||
draft := false
|
||||
|
||||
switch {
|
||||
case strings.Contains(relPath, "_posts/"):
|
||||
relPath = filepath.Join("content/post", strings.Replace(relPath, "_posts/", "", -1))
|
||||
case strings.Contains(relPath, "_drafts/"):
|
||||
relPath = filepath.Join("content/draft", strings.Replace(relPath, "_drafts/", "", -1))
|
||||
draft = true
|
||||
default:
|
||||
return nil
|
||||
}
|
||||
|
||||
fileCount++
|
||||
return convertJekyllPost(path, relPath, targetDir, draft)
|
||||
}
|
||||
|
||||
for jekyllPostDir, hasAnyPostInDir := range jekyllPostDirs {
|
||||
if hasAnyPostInDir {
|
||||
if err = helpers.SymbolicWalk(hugofs.Os, filepath.Join(jekyllRoot, jekyllPostDir), callback); err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
jww.FEEDBACK.Println("Congratulations!", fileCount, "post(s) imported!")
|
||||
jww.FEEDBACK.Println("Now, start Hugo by yourself:\n" +
|
||||
"$ git clone https://github.com/spf13/herring-cove.git " + args[1] + "/themes/herring-cove")
|
||||
jww.FEEDBACK.Println("$ cd " + args[1] + "\n$ hugo server --theme=herring-cove")
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (i *importCmd) getJekyllDirInfo(fs afero.Fs, jekyllRoot string) (map[string]bool, bool) {
|
||||
postDirs := make(map[string]bool)
|
||||
hasAnyPost := false
|
||||
if entries, err := ioutil.ReadDir(jekyllRoot); err == nil {
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
subDir := filepath.Join(jekyllRoot, entry.Name())
|
||||
if isPostDir, hasAnyPostInDir := i.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||
postDirs[entry.Name()] = hasAnyPostInDir
|
||||
if hasAnyPostInDir {
|
||||
hasAnyPost = true
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
return postDirs, hasAnyPost
|
||||
}
|
||||
|
||||
func (i *importCmd) retrieveJekyllPostDir(fs afero.Fs, dir string) (bool, bool) {
|
||||
if strings.HasSuffix(dir, "_posts") || strings.HasSuffix(dir, "_drafts") {
|
||||
isEmpty, _ := helpers.IsEmpty(dir, fs)
|
||||
return true, !isEmpty
|
||||
}
|
||||
|
||||
if entries, err := ioutil.ReadDir(dir); err == nil {
|
||||
for _, entry := range entries {
|
||||
if entry.IsDir() {
|
||||
subDir := filepath.Join(dir, entry.Name())
|
||||
if isPostDir, hasAnyPost := i.retrieveJekyllPostDir(fs, subDir); isPostDir {
|
||||
return isPostDir, hasAnyPost
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return false, true
|
||||
}
|
||||
|
||||
func (i *importCmd) createSiteFromJekyll(jekyllRoot, targetDir string, jekyllPostDirs map[string]bool, force bool) error {
	s, err := hugolib.NewSiteDefaultLang()
	if err != nil {
		return err
	}

	fs := s.Fs.Source
	if exists, _ := helpers.Exists(targetDir, fs); exists {
		if isDir, _ := helpers.IsDir(targetDir, fs); !isDir {
			return errors.New("target path \"" + targetDir + "\" exists but is not a directory")
		}

		isEmpty, _ := helpers.IsEmpty(targetDir, fs)

		if !isEmpty && !force {
			return errors.New("target path \"" + targetDir + "\" exists and is not empty")
		}
	}

	jekyllConfig := i.loadJekyllConfig(fs, jekyllRoot)

	mkdir(targetDir, "layouts")
	mkdir(targetDir, "content")
	mkdir(targetDir, "archetypes")
	mkdir(targetDir, "static")
	mkdir(targetDir, "data")
	mkdir(targetDir, "themes")

	i.createConfigFromJekyll(fs, targetDir, "yaml", jekyllConfig)

	i.copyJekyllFilesAndFolders(jekyllRoot, filepath.Join(targetDir, "static"), jekyllPostDirs)

	return nil
}

func (i *importCmd) loadJekyllConfig(fs afero.Fs, jekyllRoot string) map[string]interface{} {
	path := filepath.Join(jekyllRoot, "_config.yml")

	exists, err := helpers.Exists(path, fs)

	if err != nil || !exists {
		jww.WARN.Println("_config.yml not found: Is the specified Jekyll root correct?")
		return nil
	}

	f, err := fs.Open(path)
	if err != nil {
		return nil
	}

	defer f.Close()

	b, err := ioutil.ReadAll(f)

	if err != nil {
		return nil
	}

	c, err := metadecoders.Default.UnmarshalToMap(b, metadecoders.YAML)

	if err != nil {
		return nil
	}

	return c
}

func (i *importCmd) createConfigFromJekyll(fs afero.Fs, inpath string, kind metadecoders.Format, jekyllConfig map[string]interface{}) (err error) {
	title := "My New Hugo Site"
	baseURL := "http://example.org/"

	for key, value := range jekyllConfig {
		lowerKey := strings.ToLower(key)

		switch lowerKey {
		case "title":
			if str, ok := value.(string); ok {
				title = str
			}

		case "url":
			if str, ok := value.(string); ok {
				baseURL = str
			}
		}
	}

	in := map[string]interface{}{
		"baseURL":            baseURL,
		"title":              title,
		"languageCode":       "en-us",
		"disablePathToLower": true,
	}

	var buf bytes.Buffer
	err = parser.InterfaceToConfig(in, kind, &buf)
	if err != nil {
		return err
	}

	return helpers.WriteToDisk(filepath.Join(inpath, "config."+string(kind)), &buf, fs)
}

func (i *importCmd) copyJekyllFilesAndFolders(jekyllRoot, dest string, jekyllPostDirs map[string]bool) (err error) {
	fs := hugofs.Os

	fi, err := fs.Stat(jekyllRoot)
	if err != nil {
		return err
	}
	if !fi.IsDir() {
		return errors.New(jekyllRoot + " is not a directory")
	}
	err = os.MkdirAll(dest, fi.Mode())
	if err != nil {
		return err
	}
	entries, err := ioutil.ReadDir(jekyllRoot)
	if err != nil {
		return err
	}

	for _, entry := range entries {
		sfp := filepath.Join(jekyllRoot, entry.Name())
		dfp := filepath.Join(dest, entry.Name())
		if entry.IsDir() {
			if entry.Name()[0] != '_' && entry.Name()[0] != '.' {
				if _, ok := jekyllPostDirs[entry.Name()]; !ok {
					err = hugio.CopyDir(fs, sfp, dfp, nil)
					if err != nil {
						jww.ERROR.Println(err)
					}
				}
			}
		} else {
			lowerEntryName := strings.ToLower(entry.Name())
			exceptSuffix := []string{".md", ".markdown", ".html", ".htm",
				".xml", ".textile", "rakefile", "gemfile", ".lock"}
			isExcept := false
			for _, suffix := range exceptSuffix {
				if strings.HasSuffix(lowerEntryName, suffix) {
					isExcept = true
					break
				}
			}

			if !isExcept && entry.Name()[0] != '.' && entry.Name()[0] != '_' {
				err = hugio.CopyFile(fs, sfp, dfp)
				if err != nil {
					jww.ERROR.Println(err)
				}
			}
		}

	}
	return nil
}

func parseJekyllFilename(filename string) (time.Time, string, error) {
	re := regexp.MustCompile(`(\d+-\d+-\d+)-(.+)\..*`)
	r := re.FindAllStringSubmatch(filename, -1)
	if len(r) == 0 {
		return time.Now(), "", errors.New("filename not match")
	}

	postDate, err := time.Parse("2006-1-2", r[0][1])
	if err != nil {
		return time.Now(), "", err
	}

	postName := r[0][2]

	return postDate, postName, nil
}

func convertJekyllPost(path, relPath, targetDir string, draft bool) error {
	jww.TRACE.Println("Converting", path)

	filename := filepath.Base(path)
	postDate, postName, err := parseJekyllFilename(filename)
	if err != nil {
		jww.WARN.Printf("Failed to parse filename '%s': %s. Skipping.", filename, err)
		return nil
	}

	jww.TRACE.Println(filename, postDate, postName)

	targetFile := filepath.Join(targetDir, relPath)
	targetParentDir := filepath.Dir(targetFile)
	os.MkdirAll(targetParentDir, 0777)

	contentBytes, err := ioutil.ReadFile(path)
	if err != nil {
		jww.ERROR.Println("Read file error:", path)
		return err
	}

	pf, err := parseContentFile(bytes.NewReader(contentBytes))
	if err != nil {
		jww.ERROR.Println("Parse file error:", path)
		return err
	}

	newmetadata, err := convertJekyllMetaData(pf.frontMatter, postName, postDate, draft)
	if err != nil {
		jww.ERROR.Println("Convert metadata error:", path)
		return err
	}

	content, err := convertJekyllContent(newmetadata, string(pf.content))
	if err != nil {
		jww.ERROR.Println("Converting Jekyll error:", path)
		return err
	}

	fs := hugofs.Os
	if err := helpers.WriteToDisk(targetFile, strings.NewReader(content), fs); err != nil {
		return fmt.Errorf("failed to save file %q: %s", filename, err)
	}

	return nil
}

func convertJekyllMetaData(m interface{}, postName string, postDate time.Time, draft bool) (interface{}, error) {
	metadata, err := maps.ToStringMapE(m)
	if err != nil {
		return nil, err
	}

	if draft {
		metadata["draft"] = true
	}

	for key, value := range metadata {
		lowerKey := strings.ToLower(key)

		switch lowerKey {
		case "layout":
			delete(metadata, key)
		case "permalink":
			if str, ok := value.(string); ok {
				metadata["url"] = str
			}
			delete(metadata, key)
		case "category":
			if str, ok := value.(string); ok {
				metadata["categories"] = []string{str}
			}
			delete(metadata, key)
		case "excerpt_separator":
			if key != lowerKey {
				delete(metadata, key)
				metadata[lowerKey] = value
			}
		case "date":
			if str, ok := value.(string); ok {
				re := regexp.MustCompile(`(\d+):(\d+):(\d+)`)
				r := re.FindAllStringSubmatch(str, -1)
				if len(r) > 0 {
					hour, _ := strconv.Atoi(r[0][1])
					minute, _ := strconv.Atoi(r[0][2])
					second, _ := strconv.Atoi(r[0][3])
					postDate = time.Date(postDate.Year(), postDate.Month(), postDate.Day(), hour, minute, second, 0, time.UTC)
				}
			}
			delete(metadata, key)
		}

	}

	metadata["date"] = postDate.Format(time.RFC3339)

	return metadata, nil
}

func convertJekyllContent(m interface{}, content string) (string, error) {
	metadata, _ := maps.ToStringMapE(m)

	lines := strings.Split(content, "\n")
	var resultLines []string
	for _, line := range lines {
		resultLines = append(resultLines, strings.Trim(line, "\r\n"))
	}

	content = strings.Join(resultLines, "\n")

	excerptSep := "<!--more-->"
	if value, ok := metadata["excerpt_separator"]; ok {
		if str, strOk := value.(string); strOk {
			content = strings.Replace(content, strings.TrimSpace(str), excerptSep, -1)
		}
	}

	replaceList := []struct {
		re      *regexp.Regexp
		replace string
	}{
		{regexp.MustCompile("(?i)<!-- more -->"), "<!--more-->"},
		{regexp.MustCompile(`\{%\s*raw\s*%\}\s*(.*?)\s*\{%\s*endraw\s*%\}`), "$1"},
		{regexp.MustCompile(`{%\s*endhighlight\s*%}`), "{{< / highlight >}}"},
	}

	for _, replace := range replaceList {
		content = replace.re.ReplaceAllString(content, replace.replace)
	}

	replaceListFunc := []struct {
		re      *regexp.Regexp
		replace func(string) string
	}{
		// Octopress image tag: http://octopress.org/docs/plugins/image-tag/
		{regexp.MustCompile(`{%\s+img\s*(.*?)\s*%}`), replaceImageTag},
		{regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`), replaceHighlightTag},
	}

	for _, replace := range replaceListFunc {
		content = replace.re.ReplaceAllStringFunc(content, replace.replace)
	}

	var buf bytes.Buffer
	if len(metadata) != 0 {
		err := parser.InterfaceToFrontMatter(m, metadecoders.YAML, &buf)
		if err != nil {
			return "", err
		}
	}
	buf.WriteString(content)

	return buf.String(), nil
}

func replaceHighlightTag(match string) string {
	r := regexp.MustCompile(`{%\s*highlight\s*(.*?)\s*%}`)
	parts := r.FindStringSubmatch(match)
	lastQuote := rune(0)
	f := func(c rune) bool {
		switch {
		case c == lastQuote:
			lastQuote = rune(0)
			return false
		case lastQuote != rune(0):
			return false
		case unicode.In(c, unicode.Quotation_Mark):
			lastQuote = c
			return false
		default:
			return unicode.IsSpace(c)
		}
	}
	// splitting string by space but considering quoted section
	items := strings.FieldsFunc(parts[1], f)

	result := bytes.NewBufferString("{{< highlight ")
	result.WriteString(items[0]) // language
	options := items[1:]
	for i, opt := range options {
		opt = strings.Replace(opt, "\"", "", -1)
		if opt == "linenos" {
			opt = "linenos=table"
		}
		if i == 0 {
			opt = " \"" + opt
		}
		if i < len(options)-1 {
			opt += ","
		} else if i == len(options)-1 {
			opt += "\""
		}
		result.WriteString(opt)
	}

	result.WriteString(" >}}")
	return result.String()
}

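// For example (this mirrors the cases exercised in TestConvertJekyllContent below), the
// opening tag
//
//	{% highlight go linenos hl_lines="1 2" %}
//
// is rewritten by replaceHighlightTag to
//
//	{{< highlight go "linenos=table,hl_lines=1 2" >}}
//
// while the matching {% endhighlight %} is handled separately in convertJekyllContent.
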
func replaceImageTag(match string) string {
	r := regexp.MustCompile(`{%\s+img\s*(\p{L}*)\s+([\S]*/[\S]+)\s+(\d*)\s*(\d*)\s*(.*?)\s*%}`)
	result := bytes.NewBufferString("{{< figure ")
	parts := r.FindStringSubmatch(match)
	// Index 0 is the entire string, ignore
	replaceOptionalPart(result, "class", parts[1])
	replaceOptionalPart(result, "src", parts[2])
	replaceOptionalPart(result, "width", parts[3])
	replaceOptionalPart(result, "height", parts[4])
	// title + alt
	part := parts[5]
	if len(part) > 0 {
		splits := strings.Split(part, "'")
		lenSplits := len(splits)
		if lenSplits == 1 {
			replaceOptionalPart(result, "title", splits[0])
		} else if lenSplits == 3 {
			replaceOptionalPart(result, "title", splits[1])
		} else if lenSplits == 5 {
			replaceOptionalPart(result, "title", splits[1])
			replaceOptionalPart(result, "alt", splits[3])
		}
	}
	result.WriteString(">}}")
	return result.String()
}

func replaceOptionalPart(buffer *bytes.Buffer, partName string, part string) {
	if len(part) > 0 {
		buffer.WriteString(partName + "=\"" + part + "\" ")
	}
}
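
Taken together, the helpers above form the per-post conversion pipeline: parseJekyllFilename extracts the date and slug from the Jekyll file name, convertJekyllMetaData rewrites the front matter, and convertJekyllContent rewrites the body. The sketch below is illustrative only (convertOnePost and its sample inputs are not part of this file) and shows how the pieces compose:

func convertOnePost() (string, error) {
	// Hypothetical inputs, made up for illustration.
	postDate, postName, err := parseJekyllFilename("2015-01-02-hello.md")
	if err != nil {
		return "", err
	}
	meta, err := convertJekyllMetaData(map[string]interface{}{"layout": "post"}, postName, postDate, false)
	if err != nil {
		return "", err
	}
	// The result is Hugo YAML front matter followed by the rewritten body.
	return convertJekyllContent(meta, "Intro\n<!-- more -->\nRest of the post")
}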

commands/import_jekyll_test.go (new file, 137 lines)
@@ -0,0 +1,137 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"encoding/json"
	"testing"
	"time"

	qt "github.com/frankban/quicktest"
)

func TestParseJekyllFilename(t *testing.T) {
	c := qt.New(t)
	filenameArray := []string{
		"2015-01-02-test.md",
		"2012-03-15-中文.markup",
	}

	expectResult := []struct {
		postDate time.Time
		postName string
	}{
		{time.Date(2015, time.January, 2, 0, 0, 0, 0, time.UTC), "test"},
		{time.Date(2012, time.March, 15, 0, 0, 0, 0, time.UTC), "中文"},
	}

	for i, filename := range filenameArray {
		postDate, postName, err := parseJekyllFilename(filename)
		c.Assert(err, qt.IsNil)
		c.Assert(expectResult[i].postDate.Format("2006-01-02"), qt.Equals, postDate.Format("2006-01-02"))
		c.Assert(expectResult[i].postName, qt.Equals, postName)
	}
}

func TestConvertJekyllMetadata(t *testing.T) {
	c := qt.New(t)
	testDataList := []struct {
		metadata interface{}
		postName string
		postDate time.Time
		draft    bool
		expect   string
	}{
		{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"date":"2015-10-01T00:00:00Z"}`},
		{map[interface{}]interface{}{}, "testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), true,
			`{"date":"2015-10-01T00:00:00Z","draft":true}`},
		{map[interface{}]interface{}{"Permalink": "/permalink.html", "layout": "post"},
			"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
		{map[interface{}]interface{}{"permalink": "/permalink.html"},
			"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"date":"2015-10-01T00:00:00Z","url":"/permalink.html"}`},
		{map[interface{}]interface{}{"category": nil, "permalink": 123},
			"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"date":"2015-10-01T00:00:00Z"}`},
		{map[interface{}]interface{}{"Excerpt_Separator": "sep"},
			"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"date":"2015-10-01T00:00:00Z","excerpt_separator":"sep"}`},
		{map[interface{}]interface{}{"category": "book", "layout": "post", "Others": "Goods", "Date": "2015-10-01 12:13:11"},
			"testPost", time.Date(2015, 10, 1, 0, 0, 0, 0, time.UTC), false,
			`{"Others":"Goods","categories":["book"],"date":"2015-10-01T12:13:11Z"}`},
	}

	for _, data := range testDataList {
		result, err := convertJekyllMetaData(data.metadata, data.postName, data.postDate, data.draft)
		c.Assert(err, qt.IsNil)
		jsonResult, err := json.Marshal(result)
		c.Assert(err, qt.IsNil)
		c.Assert(string(jsonResult), qt.Equals, data.expect)
	}
}

func TestConvertJekyllContent(t *testing.T) {
	c := qt.New(t)
	testDataList := []struct {
		metadata interface{}
		content  string
		expect   string
	}{
		{map[interface{}]interface{}{},
			"Test content\r\n<!-- more -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
		{map[interface{}]interface{}{},
			"Test content\n<!-- More -->\npart2 content", "Test content\n<!--more-->\npart2 content"},
		{map[interface{}]interface{}{"excerpt_separator": "<!--sep-->"},
			"Test content\n<!--sep-->\npart2 content",
			"---\nexcerpt_separator: <!--sep-->\n---\nTest content\n<!--more-->\npart2 content"},
		{map[interface{}]interface{}{}, "{% raw %}text{% endraw %}", "text"},
		{map[interface{}]interface{}{}, "{%raw%} text2 {%endraw %}", "text2"},
		{map[interface{}]interface{}{},
			"{% highlight go %}\nvar s int\n{% endhighlight %}",
			"{{< highlight go >}}\nvar s int\n{{< / highlight >}}"},
		{map[interface{}]interface{}{},
			"{% highlight go linenos hl_lines=\"1 2\" %}\nvar s string\nvar i int\n{% endhighlight %}",
			"{{< highlight go \"linenos=table,hl_lines=1 2\" >}}\nvar s string\nvar i int\n{{< / highlight >}}"},

		// Octopress image tag
		{map[interface{}]interface{}{},
			"{% img http://placekitten.com/890/280 %}",
			"{{< figure src=\"http://placekitten.com/890/280\" >}}"},
		{map[interface{}]interface{}{},
			"{% img left http://placekitten.com/320/250 Place Kitten #2 %}",
			"{{< figure class=\"left\" src=\"http://placekitten.com/320/250\" title=\"Place Kitten #2\" >}}"},
		{map[interface{}]interface{}{},
			"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #3' %}",
			"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #3\" >}}"},
		{map[interface{}]interface{}{},
			"{% img right http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
			"{{< figure class=\"right\" src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
		{map[interface{}]interface{}{},
			"{% img http://placekitten.com/300/500 150 250 'Place Kitten #4' 'An image of a very cute kitten' %}",
			"{{< figure src=\"http://placekitten.com/300/500\" width=\"150\" height=\"250\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
		{map[interface{}]interface{}{},
			"{% img right /placekitten/300/500 'Place Kitten #4' 'An image of a very cute kitten' %}",
			"{{< figure class=\"right\" src=\"/placekitten/300/500\" title=\"Place Kitten #4\" alt=\"An image of a very cute kitten\" >}}"},
		{map[interface{}]interface{}{"category": "book", "layout": "post", "Date": "2015-10-01 12:13:11"},
			"somecontent",
			"---\nDate: \"2015-10-01 12:13:11\"\ncategory: book\nlayout: post\n---\nsomecontent"},
	}
	for _, data := range testDataList {
		result, err := convertJekyllContent(data.metadata, data.content)
		c.Assert(result, qt.Equals, data.expect)
		c.Assert(err, qt.IsNil)
	}
}
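
These table-driven tests can be run on their own with the standard Go tooling, for example: go test ./commands -run 'TestParseJekyllFilename|TestConvertJekyll'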

commands/limit_darwin.go (new file, 84 lines)
@@ -0,0 +1,84 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"syscall"

	"github.com/spf13/cobra"
	jww "github.com/spf13/jwalterweatherman"
)

var _ cmder = (*limitCmd)(nil)

type limitCmd struct {
	*baseCmd
}

func newLimitCmd() *limitCmd {
	ccmd := &cobra.Command{
		Use:   "ulimit",
		Short: "Check system ulimit settings",
		Long: `Hugo will inspect the current ulimit settings on the system.
This is primarily to ensure that Hugo can watch enough files on some OSs`,
		RunE: func(cmd *cobra.Command, args []string) error {
			var rLimit syscall.Rlimit
			err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
			if err != nil {
				return newSystemError("Error Getting rlimit ", err)
			}

			jww.FEEDBACK.Println("Current rLimit:", rLimit)

			if rLimit.Cur >= newRlimit {
				return nil
			}

			jww.FEEDBACK.Println("Attempting to increase limit")
			rLimit.Cur = newRlimit
			err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
			if err != nil {
				return newSystemError("Error Setting rLimit ", err)
			}
			err = syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
			if err != nil {
				return newSystemError("Error Getting rLimit ", err)
			}
			jww.FEEDBACK.Println("rLimit after change:", rLimit)

			return nil
		},
	}

	return &limitCmd{baseCmd: newBaseCmd(ccmd)}
}

const newRlimit = 10240

func tweakLimit() {
	var rLimit syscall.Rlimit
	err := syscall.Getrlimit(syscall.RLIMIT_NOFILE, &rLimit)
	if err != nil {
		jww.WARN.Println("Unable to get rlimit:", err)
		return
	}
	if rLimit.Cur < newRlimit {
		rLimit.Cur = newRlimit
		err = syscall.Setrlimit(syscall.RLIMIT_NOFILE, &rLimit)
		if err != nil {
			// This may not succeed, see https://github.com/golang/go/issues/30401
			jww.INFO.Println("Unable to increase number of open files limit:", err)
		}
	}
}

commands/limit_others.go (new file, 20 lines)
@@ -0,0 +1,20 @@
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// +build !darwin

package commands

func tweakLimit() {
	// nothing to do
}
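
limit_darwin.go and limit_others.go are two halves of the same feature, selected by Go build constraints: the first is compiled only for darwin because of its _darwin file-name suffix, while the second carries the "+build !darwin" constraint and supplies the no-op tweakLimit everywhere else. A minimal sketch of the same pattern, with hypothetical file and function names:

// feature_darwin.go — compiled only when GOOS=darwin (file-name suffix rule).
package commands

func platformTweak() { /* darwin-specific work, e.g. raising RLIMIT_NOFILE */ }

// feature_others.go — compiled for every other GOOS.
// +build !darwin

package commands

func platformTweak() { /* nothing to do */ }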

commands/list.go (318 lines changed)
@@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -14,50 +14,158 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"encoding/csv"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
"github.com/gohugoio/hugo/resources/page"
|
||||
"github.com/gohugoio/hugo/resources/resource"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
// newListCommand creates a new list command and its subcommands.
|
||||
func newListCommand() *listCommand {
|
||||
createRecord := func(workingDir string, p page.Page) []string {
|
||||
return []string{
|
||||
filepath.ToSlash(strings.TrimPrefix(p.File().Filename(), workingDir+string(os.PathSeparator))),
|
||||
p.Slug(),
|
||||
p.Title(),
|
||||
p.Date().Format(time.RFC3339),
|
||||
p.ExpiryDate().Format(time.RFC3339),
|
||||
p.PublishDate().Format(time.RFC3339),
|
||||
strconv.FormatBool(p.Draft()),
|
||||
p.Permalink(),
|
||||
p.Kind(),
|
||||
p.Section(),
|
||||
}
|
||||
var _ cmder = (*listCmd)(nil)
|
||||
|
||||
type listCmd struct {
|
||||
hugoBuilderCommon
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
list := func(cd *simplecobra.Commandeer, r *rootCommand, shouldInclude func(page.Page) bool, opts ...any) error {
|
||||
bcfg := hugolib.BuildCfg{SkipRender: true}
|
||||
cfg := flagsToCfg(cd, nil)
|
||||
for i := 0; i < len(opts); i += 2 {
|
||||
cfg.Set(opts[i].(string), opts[i+1])
|
||||
func (lc *listCmd) buildSites(config map[string]interface{}) (*hugolib.HugoSites, error) {
|
||||
cfgInit := func(c *commandeer) error {
|
||||
for key, value := range config {
|
||||
c.Set(key, value)
|
||||
}
|
||||
h, err := r.Build(cd, bcfg, cfg)
|
||||
return nil
|
||||
}
|
||||
|
||||
c, err := initializeConfig(true, false, &lc.hugoBuilderCommon, lc, cfgInit)
|
||||
if err != nil {
|
||||
return err
|
||||
return nil, err
|
||||
}
|
||||
|
||||
writer := csv.NewWriter(r.StdOut)
|
||||
sites, err := hugolib.NewHugoSites(*c.DepsCfg)
|
||||
|
||||
if err != nil {
|
||||
return nil, newSystemError("Error creating sites", err)
|
||||
}
|
||||
|
||||
if err := sites.Build(hugolib.BuildCfg{SkipRender: true}); err != nil {
|
||||
return nil, newSystemError("Error Processing Source Content", err)
|
||||
}
|
||||
|
||||
return sites, nil
|
||||
}
|
||||
|
||||
func newListCmd() *listCmd {
|
||||
cc := &listCmd{}
|
||||
|
||||
cc.baseCmd = newBaseCmd(&cobra.Command{
|
||||
Use: "list",
|
||||
Short: "Listing out various types of content",
|
||||
Long: `Listing out various types of content.
|
||||
|
||||
List requires a subcommand, e.g. ` + "`hugo list drafts`.",
|
||||
RunE: nil,
|
||||
})
|
||||
|
||||
cc.cmd.AddCommand(
|
||||
&cobra.Command{
|
||||
Use: "drafts",
|
||||
Short: "List all drafts",
|
||||
Long: `List all of the drafts in your content directory.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
sites, err := cc.buildSites(map[string]interface{}{"buildDrafts": true})
|
||||
|
||||
if err != nil {
|
||||
return newSystemError("Error building sites", err)
|
||||
}
|
||||
|
||||
for _, p := range sites.Pages() {
|
||||
if p.Draft() {
|
||||
jww.FEEDBACK.Println(strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)))
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "future",
|
||||
Short: "List all posts dated in the future",
|
||||
Long: `List all of the posts in your content directory which will be posted in the future.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
sites, err := cc.buildSites(map[string]interface{}{"buildFuture": true})
|
||||
|
||||
if err != nil {
|
||||
return newSystemError("Error building sites", err)
|
||||
}
|
||||
|
||||
writer := csv.NewWriter(os.Stdout)
|
||||
defer writer.Flush()
|
||||
|
||||
for _, p := range sites.Pages() {
|
||||
if resource.IsFuture(p) {
|
||||
err := writer.Write([]string{
|
||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
||||
p.PublishDate().Format(time.RFC3339),
|
||||
})
|
||||
if err != nil {
|
||||
return newSystemError("Error writing future posts to stdout", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "expired",
|
||||
Short: "List all posts already expired",
|
||||
Long: `List all of the posts in your content directory which has already expired.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
sites, err := cc.buildSites(map[string]interface{}{"buildExpired": true})
|
||||
|
||||
if err != nil {
|
||||
return newSystemError("Error building sites", err)
|
||||
}
|
||||
|
||||
writer := csv.NewWriter(os.Stdout)
|
||||
defer writer.Flush()
|
||||
|
||||
for _, p := range sites.Pages() {
|
||||
if resource.IsExpired(p) {
|
||||
err := writer.Write([]string{
|
||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
||||
p.ExpiryDate().Format(time.RFC3339),
|
||||
})
|
||||
if err != nil {
|
||||
return newSystemError("Error writing expired posts to stdout", err)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "all",
|
||||
Short: "List all posts",
|
||||
Long: `List all of the posts in your content directory, include drafts, future and expired pages.`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
sites, err := cc.buildSites(map[string]interface{}{
|
||||
"buildExpired": true,
|
||||
"buildDrafts": true,
|
||||
"buildFuture": true,
|
||||
})
|
||||
|
||||
if err != nil {
|
||||
return newSystemError("Error building sites", err)
|
||||
}
|
||||
|
||||
writer := csv.NewWriter(os.Stdout)
|
||||
defer writer.Flush()
|
||||
|
||||
writer.Write([]string{
|
||||
|
@ -69,145 +177,33 @@ func newListCommand() *listCommand {
|
|||
"publishDate",
|
||||
"draft",
|
||||
"permalink",
|
||||
"kind",
|
||||
"section",
|
||||
})
|
||||
|
||||
for _, p := range h.Pages() {
|
||||
if shouldInclude(p) {
|
||||
record := createRecord(h.Conf.BaseConfig().WorkingDir, p)
|
||||
if err := writer.Write(record); err != nil {
|
||||
return err
|
||||
for _, p := range sites.Pages() {
|
||||
if !p.IsPage() {
|
||||
continue
|
||||
}
|
||||
err := writer.Write([]string{
|
||||
strings.TrimPrefix(p.File().Filename(), sites.WorkingDir+string(os.PathSeparator)),
|
||||
p.Slug(),
|
||||
p.Title(),
|
||||
p.Date().Format(time.RFC3339),
|
||||
p.ExpiryDate().Format(time.RFC3339),
|
||||
p.PublishDate().Format(time.RFC3339),
|
||||
strconv.FormatBool(p.Draft()),
|
||||
p.Permalink(),
|
||||
})
|
||||
if err != nil {
|
||||
return newSystemError("Error writing posts to stdout", err)
|
||||
}
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
return &listCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "drafts",
|
||||
short: "List draft content",
|
||||
long: `List draft content.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
shouldInclude := func(p page.Page) bool {
|
||||
if !p.Draft() || p.File() == nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
return list(cd, r, shouldInclude,
|
||||
"buildDrafts", true,
|
||||
"buildFuture", true,
|
||||
"buildExpired", true,
|
||||
},
|
||||
},
|
||||
)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "future",
|
||||
short: "List future content",
|
||||
long: `List content with a future publication date.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
shouldInclude := func(p page.Page) bool {
|
||||
if !resource.IsFuture(p) || p.File() == nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
return list(cd, r, shouldInclude,
|
||||
"buildFuture", true,
|
||||
"buildDrafts", true,
|
||||
)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "expired",
|
||||
short: "List expired content",
|
||||
long: `List content with a past expiration date.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
shouldInclude := func(p page.Page) bool {
|
||||
if !resource.IsExpired(p) || p.File() == nil {
|
||||
return false
|
||||
}
|
||||
return true
|
||||
}
|
||||
return list(cd, r, shouldInclude,
|
||||
"buildExpired", true,
|
||||
"buildDrafts", true,
|
||||
)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "all",
|
||||
short: "List all content",
|
||||
long: `List all content including draft, future, and expired.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
shouldInclude := func(p page.Page) bool {
|
||||
return p.File() != nil
|
||||
}
|
||||
return list(cd, r, shouldInclude, "buildDrafts", true, "buildFuture", true, "buildExpired", true)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "published",
|
||||
short: "List published content",
|
||||
long: `List content that is not draft, future, or expired.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
shouldInclude := func(p page.Page) bool {
|
||||
return !p.Draft() && !resource.IsFuture(p) && !resource.IsExpired(p) && p.File() != nil
|
||||
}
|
||||
return list(cd, r, shouldInclude)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
}
|
||||
|
||||
type listCommand struct {
|
||||
commands []simplecobra.Commander
|
||||
}
|
||||
|
||||
func (c *listCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
func (c *listCommand) Name() string {
|
||||
return "list"
|
||||
}
|
||||
|
||||
func (c *listCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
// Do nothing.
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *listCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "List content"
|
||||
cmd.Long = `List content.
|
||||
|
||||
List requires a subcommand, e.g. hugo list drafts`
|
||||
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *listCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
return nil
|
||||
cc.cmd.PersistentFlags().StringVarP(&cc.source, "source", "s", "", "filesystem path to read files relative from")
|
||||
cc.cmd.PersistentFlags().SetAnnotation("source", cobra.BashCompSubdirsInDir, []string{})
|
||||
|
||||
return cc
|
||||
}
|
||||
|
|

commands/list_test.go (new file, 72 lines)
@@ -0,0 +1,72 @@
package commands

import (
	"bytes"
	"encoding/csv"
	"io"
	"os"
	"path/filepath"
	"strings"
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/spf13/cobra"
)

func captureStdout(f func() (*cobra.Command, error)) (string, error) {
	old := os.Stdout
	r, w, _ := os.Pipe()
	os.Stdout = w

	_, err := f()

	if err != nil {
		return "", err
	}

	w.Close()
	os.Stdout = old

	var buf bytes.Buffer
	io.Copy(&buf, r)
	return buf.String(), nil
}

func TestListAll(t *testing.T) {
	c := qt.New(t)
	dir, err := createSimpleTestSite(t, testSiteConfig{})

	c.Assert(err, qt.IsNil)

	hugoCmd := newCommandsBuilder().addAll().build()
	cmd := hugoCmd.getCommand()

	defer func() {
		os.RemoveAll(dir)
	}()

	cmd.SetArgs([]string{"-s=" + dir, "list", "all"})

	out, err := captureStdout(cmd.ExecuteC)
	c.Assert(err, qt.IsNil)

	r := csv.NewReader(strings.NewReader(out))

	header, err := r.Read()

	c.Assert(err, qt.IsNil)
	c.Assert(header, qt.DeepEquals, []string{
		"path", "slug", "title",
		"date", "expiryDate", "publishDate",
		"draft", "permalink",
	})

	record, err := r.Read()

	c.Assert(err, qt.IsNil)
	c.Assert(record, qt.DeepEquals, []string{
		filepath.Join("content", "p1.md"), "", "P1",
		"0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z", "0001-01-01T00:00:00Z",
		"false", "https://example.org/p1/",
	})
}
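
Based on the header and first record asserted in TestListAll above, hugo list all in this version writes CSV shaped like:

path,slug,title,date,expiryDate,publishDate,draft,permalink
content/p1.md,,P1,0001-01-01T00:00:00Z,0001-01-01T00:00:00Z,0001-01-01T00:00:00Z,false,https://example.org/p1/

(on Unix-like systems, where filepath.Join("content", "p1.md") yields content/p1.md).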

commands/mod.go (400 lines changed)
@@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@@ -14,204 +14,50 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/modules/npm"
|
||||
"github.com/gohugoio/hugo/modules"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
const commonUsageMod = `
|
||||
var _ cmder = (*modCmd)(nil)
|
||||
|
||||
type modCmd struct {
|
||||
*baseBuilderCmd
|
||||
}
|
||||
|
||||
func (b *commandsBuilder) newModCmd() *modCmd {
|
||||
c := &modCmd{}
|
||||
|
||||
const commonUsage = `
|
||||
Note that Hugo will always start out by resolving the components defined in the site
|
||||
configuration, provided by a _vendor directory (if no --ignoreVendorPaths flag provided),
|
||||
configuration, provided by a _vendor directory (if no --ignoreVendor flag provided),
|
||||
Go Modules, or a folder inside the themes directory, in that order.
|
||||
|
||||
See https://gohugo.io/hugo-modules/ for more information.
|
||||
|
||||
`
|
||||
|
||||
// buildConfigCommands creates a new config command and its subcommands.
|
||||
func newModCommands() *modCommands {
|
||||
var (
|
||||
clean bool
|
||||
pattern string
|
||||
all bool
|
||||
)
|
||||
cmd := &cobra.Command{
|
||||
Use: "mod",
|
||||
Short: "Various Hugo Modules helpers.",
|
||||
Long: `Various helpers to help manage the modules in your project's dependency graph.
|
||||
|
||||
npmCommand := &simpleCommand{
|
||||
name: "npm",
|
||||
short: "Various npm helpers",
|
||||
long: `Various npm (Node package manager) helpers.`,
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "pack",
|
||||
short: "Experimental: Prepares and writes a composite package.json file for your project",
|
||||
long: `Prepares and writes a composite package.json file for your project.
|
||||
Most operations here requires a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
|
||||
This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
|
||||
|
||||
On first run it creates a "package.hugo.json" in the project root if not already there. This file will be used as a template file
|
||||
with the base dependency set.
|
||||
` + commonUsage,
|
||||
|
||||
This set will be merged with all "package.hugo.json" files found in the dependency tree, picking the version closest to the project.
|
||||
|
||||
This command is marked as 'Experimental'. We think it's a great idea, so it's not likely to be
|
||||
removed from Hugo, but we need to test this out in "real life" to get a feel of it,
|
||||
so this may/will change in future versions of Hugo.
|
||||
`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return npm.Pack(h.BaseFs.ProjectSourceFs, h.BaseFs.AssetsWithDuplicatesPreserved.Fs)
|
||||
},
|
||||
},
|
||||
},
|
||||
RunE: nil,
|
||||
}
|
||||
|
||||
return &modCommands{
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "init",
|
||||
short: "Initialize this project as a Hugo Module",
|
||||
long: `Initialize this project as a Hugo Module.
|
||||
It will try to guess the module path, but you may help by passing it as an argument, e.g:
|
||||
|
||||
hugo mod init github.com/gohugoio/testshortcodes
|
||||
|
||||
Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
|
||||
inside a subfolder on GitHub, as one example.
|
||||
`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.getOrCreateHugo(flagsToCfg(cd, nil), true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
var initPath string
|
||||
if len(args) >= 1 {
|
||||
initPath = args[0]
|
||||
}
|
||||
c := h.Configs.ModulesClient
|
||||
if err := c.Init(initPath); err != nil {
|
||||
return err
|
||||
}
|
||||
return nil
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "verify",
|
||||
short: "Verify dependencies",
|
||||
long: `Verify checks that the dependencies of the current module, which are stored in a local downloaded source cache, have not been modified since being downloaded.`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
cmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Load()}, flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
client := conf.configs.ModulesClient
|
||||
return client.Verify(clean)
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "graph",
|
||||
short: "Print a module dependency graph",
|
||||
long: `Print a module dependency graph with information about module status (disabled, vendored).
|
||||
Note that for vendored modules, that is the version listed and not the one from go.mod.
|
||||
`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
cmd.Flags().BoolVarP(&clean, "clean", "", false, "delete module cache for dependencies that fail verification")
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Load()}, flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
client := conf.configs.ModulesClient
|
||||
return client.Graph(os.Stdout)
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "clean",
|
||||
short: "Delete the Hugo Module cache for the current project",
|
||||
long: `Delete the Hugo Module cache for the current project.`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
cmd.Flags().StringVarP(&pattern, "pattern", "", "", `pattern matching module paths to clean (all if not set), e.g. "**hugo*"`)
|
||||
_ = cmd.RegisterFlagCompletionFunc("pattern", cobra.NoFileCompletions)
|
||||
cmd.Flags().BoolVarP(&all, "all", "", false, "clean entire module cache")
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
if all {
|
||||
modCache := h.ResourceSpec.FileCaches.ModulesCache()
|
||||
count, err := modCache.Prune(true)
|
||||
r.Printf("Deleted %d files from module cache.", count)
|
||||
return err
|
||||
}
|
||||
|
||||
return h.Configs.ModulesClient.Clean(pattern)
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "tidy",
|
||||
short: "Remove unused entries in go.mod and go.sum",
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return h.Configs.ModulesClient.Tidy()
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "vendor",
|
||||
short: "Vendor all module dependencies into the _vendor directory",
|
||||
long: `Vendor all module dependencies into the _vendor directory.
|
||||
If a module is vendored, that is where Hugo will look for it's dependencies.
|
||||
`,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
return h.Configs.ModulesClient.Vendor()
|
||||
},
|
||||
},
|
||||
|
||||
&simpleCommand{
|
||||
name: "get",
|
||||
short: "Resolves dependencies in your current Hugo project",
|
||||
long: `
|
||||
Resolves dependencies in your current Hugo project.
|
||||
cmd.AddCommand(
|
||||
&cobra.Command{
|
||||
Use: "get",
|
||||
DisableFlagParsing: true,
|
||||
Short: "Resolves dependencies in your current Hugo Project.",
|
||||
Long: `
|
||||
Resolves dependencies in your current Hugo Project.
|
||||
|
||||
Some examples:
|
||||
|
||||
|
@ -223,122 +69,120 @@ Install a specific version:
|
|||
|
||||
hugo mod get github.com/gohugoio/testshortcodes@v0.3.0
|
||||
|
||||
Install the latest versions of all direct module dependencies:
|
||||
|
||||
hugo mod get
|
||||
hugo mod get ./... (recursive)
|
||||
|
||||
Install the latest versions of all module dependencies (direct and indirect):
|
||||
Install the latest versions of all module dependencies:
|
||||
|
||||
hugo mod get -u
|
||||
hugo mod get -u ./... (recursive)
|
||||
|
||||
Run "go help get" for more information. All flags available for "go get" is also relevant here.
|
||||
` + commonUsageMod,
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.DisableFlagParsing = true
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
},
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
` + commonUsage,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return c.withModsClient(false, func(c *modules.Client) error {
|
||||
// We currently just pass on the flags we get to Go and
|
||||
// need to do the flag handling manually.
|
||||
if len(args) == 1 && (args[0] == "-h" || args[0] == "--help") {
|
||||
return errHelp
|
||||
if len(args) == 1 && args[0] == "-h" {
|
||||
return cmd.Help()
|
||||
}
|
||||
|
||||
var lastArg string
|
||||
if len(args) != 0 {
|
||||
lastArg = args[len(args)-1]
|
||||
}
|
||||
|
||||
if lastArg == "./..." {
|
||||
args = args[:len(args)-1]
|
||||
// Do a recursive update.
|
||||
dirname, err := os.Getwd()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
// Sanity chesimplecobra. We do recursive walking and want to avoid
|
||||
// accidents.
|
||||
if len(dirname) < 5 {
|
||||
return errors.New("must not be run from the file system root")
|
||||
}
|
||||
|
||||
filepath.Walk(dirname, func(path string, info os.FileInfo, err error) error {
|
||||
if info.IsDir() {
|
||||
return nil
|
||||
}
|
||||
if info.Name() == "go.mod" {
|
||||
// Found a module.
|
||||
dir := filepath.Dir(path)
|
||||
|
||||
cfg := config.New()
|
||||
cfg.Set("workingDir", dir)
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Add(1)}, flagsToCfg(cd, cfg))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
r.Println("Update module in", conf.configs.Base.WorkingDir)
|
||||
client := conf.configs.ModulesClient
|
||||
return client.Get(args...)
|
||||
|
||||
}
|
||||
return nil
|
||||
return c.Get(args...)
|
||||
})
|
||||
return nil
|
||||
} else {
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Load()}, flagsToCfg(cd, nil))
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "graph",
|
||||
Short: "Print a module dependency graph.",
|
||||
Long: `Print a module dependency graph with information about module status (disabled, vendored).
|
||||
Note that for vendored modules, that is the version listed and not the one from go.mod.
|
||||
`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return c.withModsClient(true, func(c *modules.Client) error {
|
||||
return c.Graph(os.Stdout)
|
||||
})
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "init",
|
||||
Short: "Initialize this project as a Hugo Module.",
|
||||
Long: `Initialize this project as a Hugo Module.
|
||||
It will try to guess the module path, but you may help by passing it as an argument, e.g:
|
||||
|
||||
hugo mod init github.com/gohugoio/testshortcodes
|
||||
|
||||
Note that Hugo Modules supports multi-module projects, so you can initialize a Hugo Module
|
||||
inside a subfolder on GitHub, as one example.
|
||||
`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
var path string
|
||||
if len(args) >= 1 {
|
||||
path = args[0]
|
||||
}
|
||||
return c.withModsClient(false, func(c *modules.Client) error {
|
||||
return c.Init(path)
|
||||
})
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "vendor",
|
||||
Short: "Vendor all module dependencies into the _vendor directory.",
|
||||
Long: `Vendor all module dependencies into the _vendor directory.
|
||||
|
||||
If a module is vendored, that is where Hugo will look for it's dependencies.
|
||||
`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return c.withModsClient(true, func(c *modules.Client) error {
|
||||
return c.Vendor()
|
||||
})
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "tidy",
|
||||
Short: "Remove unused entries in go.mod and go.sum.",
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
return c.withModsClient(true, func(c *modules.Client) error {
|
||||
return c.Tidy()
|
||||
})
|
||||
},
|
||||
},
|
||||
&cobra.Command{
|
||||
Use: "clean",
|
||||
Short: "Delete the entire Hugo Module cache.",
|
||||
Long: `Delete the entire Hugo Module cache.
|
||||
|
||||
Note that after you run this command, all of your dependencies will be re-downloaded next time you run "hugo".
|
||||
|
||||
Also note that if you configure a positive maxAge for the "modules" file cache, it will also be cleaned as part of "hugo --gc".
|
||||
|
||||
`,
|
||||
RunE: func(cmd *cobra.Command, args []string) error {
|
||||
com, err := c.initConfig(true)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
client := conf.configs.ModulesClient
|
||||
return client.Get(args...)
|
||||
}
|
||||
|
||||
_, err = com.hugo().FileCaches.ModulesCache().Prune(true)
|
||||
return err
|
||||
|
||||
},
|
||||
},
|
||||
npmCommand,
|
||||
},
|
||||
}
|
||||
)
|
||||
|
||||
c.baseBuilderCmd = b.newBuilderCmd(cmd)
|
||||
|
||||
return c
|
||||
|
||||
}
|
||||
|
||||
type modCommands struct {
|
||||
r *rootCommand
|
||||
|
||||
commands []simplecobra.Commander
|
||||
}
|
||||
|
||||
func (c *modCommands) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
func (c *modCommands) Name() string {
|
||||
return "mod"
|
||||
}
|
||||
|
||||
func (c *modCommands) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
_, err := c.r.ConfigFromProvider(configKey{counter: c.r.configVersionID.Load()}, nil)
|
||||
func (c *modCmd) withModsClient(failOnMissingConfig bool, f func(*modules.Client) error) error {
|
||||
com, err := c.initConfig(failOnMissingConfig)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
// config := conf.configs.Base
|
||||
|
||||
return nil
|
||||
return f(com.hugo().ModulesClient)
|
||||
}
|
||||
|
||||
func (c *modCommands) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Manage modules"
|
||||
cmd.Long = `Various helpers to help manage the modules in your project's dependency graph.
|
||||
Most operations here requires a Go version installed on your system (>= Go 1.12) and the relevant VCS client (typically Git).
|
||||
This is not needed if you only operate on modules inside /themes or if you have vendored them via "hugo mod vendor".
|
||||
|
||||
` + commonUsageMod
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
func (c *modCmd) initConfig(failOnNoConfig bool) (*commandeer, error) {
|
||||
com, err := initializeConfig(failOnNoConfig, false, &c.hugoBuilderCommon, c, nil)
|
||||
if err != nil {
|
||||
return nil, err
|
||||
}
|
||||
|
||||
func (c *modCommands) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.r = cd.Root.Command.(*rootCommand)
|
||||
return nil
|
||||
return com, nil
|
||||
}
|
||||
|
|
264
commands/new.go
264
commands/new.go
|
@ -1,4 +1,4 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@ -15,33 +15,32 @@ package commands
|
|||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"os"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/common/paths"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/create"
|
||||
"github.com/gohugoio/hugo/create/skeletons"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugolib"
|
||||
"github.com/spf13/afero"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
func newNewCommand() *newCommand {
|
||||
var (
|
||||
force bool
|
||||
var _ cmder = (*newCmd)(nil)
|
||||
|
||||
type newCmd struct {
|
||||
contentEditor string
|
||||
contentType string
|
||||
format string
|
||||
)
|
||||
|
||||
var c *newCommand
|
||||
c = &newCommand{
|
||||
commands: []simplecobra.Commander{
|
||||
&simpleCommand{
|
||||
name: "content",
|
||||
use: "content [path]",
|
||||
short: "Create new content",
|
||||
long: `Create a new content file and automatically set the date and title.
|
||||
*baseBuilderCmd
|
||||
}
|
||||
|
||||
func (b *commandsBuilder) newNewCmd() *newCmd {
|
||||
cmd := &cobra.Command{
|
||||
Use: "new [path]",
|
||||
Short: "Create new content for your site",
|
||||
Long: `Create a new content file and automatically set the date and title.
|
||||
It will guess which kind of file to create based on the path provided.
|
||||
|
||||
You can also specify the kind with ` + "`-k KIND`" + `.
|
||||
|
@ -49,179 +48,90 @@ You can also specify the kind with ` + "`-k KIND`" + `.
|
|||
If archetypes are provided in your theme or site, they will be used.
|
||||
|
||||
Ensure you run this within the root directory of your site.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
}
|
||||
|
||||
cc := &newCmd{baseBuilderCmd: b.newBuilderCmd(cmd)}
|
||||
|
||||
cmd.Flags().StringVarP(&cc.contentType, "kind", "k", "", "content type to create")
|
||||
cmd.Flags().StringVar(&cc.contentEditor, "editor", "", "edit new content with this editor, if provided")
|
||||
|
||||
cmd.AddCommand(newNewSiteCmd().getCommand())
|
||||
cmd.AddCommand(newNewThemeCmd().getCommand())
|
||||
|
||||
cmd.RunE = cc.newContent
|
||||
|
||||
return cc
|
||||
}
|
||||
|
||||
func (n *newCmd) newContent(cmd *cobra.Command, args []string) error {
|
||||
cfgInit := func(c *commandeer) error {
|
||||
if cmd.Flags().Changed("editor") {
|
||||
c.Set("newContentEditor", n.contentEditor)
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
c, err := initializeConfig(true, false, &n.hugoBuilderCommon, n, cfgInit)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(args) < 1 {
|
||||
return newUserError("path needs to be provided")
|
||||
}
|
||||
h, err := r.Hugo(flagsToCfg(cd, nil))
|
||||
|
||||
createPath := args[0]
|
||||
|
||||
var kind string
|
||||
|
||||
createPath, kind = newContentPathSection(c.hugo(), createPath)
|
||||
|
||||
if n.contentType != "" {
|
||||
kind = n.contentType
|
||||
}
|
||||
|
||||
return create.NewContent(c.hugo(), kind, createPath)
|
||||
}
|
||||
|
||||
func mkdir(x ...string) {
|
||||
p := filepath.Join(x...)
|
||||
|
||||
err := os.MkdirAll(p, 0777) // before umask
|
||||
if err != nil {
|
||||
return err
|
||||
jww.FATAL.Fatalln(err)
|
||||
}
|
||||
return create.NewContent(h, contentType, args[0], force)
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
if len(args) != 0 {
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp
|
||||
}
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp | cobra.ShellCompDirectiveFilterDirs
|
||||
}
|
||||
cmd.Flags().StringVarP(&contentType, "kind", "k", "", "content type to create")
|
||||
cmd.Flags().String("editor", "", "edit new content with this editor, if provided")
|
||||
_ = cmd.RegisterFlagCompletionFunc("editor", cobra.NoFileCompletions)
|
||||
cmd.Flags().BoolVarP(&force, "force", "f", false, "overwrite file if it already exists")
|
||||
applyLocalFlagsBuildConfig(cmd, r)
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "site",
|
||||
use: "site [path]",
|
||||
short: "Create a new site",
|
||||
long: `Create a new site at the specified path.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
if len(args) < 1 {
|
||||
return newUserError("path needs to be provided")
|
||||
}
|
||||
createpath, err := filepath.Abs(filepath.Clean(args[0]))
|
||||
|
||||
func touchFile(fs afero.Fs, x ...string) {
|
||||
inpath := filepath.Join(x...)
|
||||
mkdir(filepath.Dir(inpath))
|
||||
err := helpers.WriteToDisk(inpath, bytes.NewReader([]byte{}), fs)
|
||||
if err != nil {
|
||||
return err
|
||||
jww.FATAL.Fatalln(err)
|
||||
}
|
||||
}
|
||||
|
||||
cfg := config.New()
|
||||
cfg.Set("workingDir", createpath)
|
||||
cfg.Set("publishDir", "public")
|
||||
func newContentPathSection(h *hugolib.HugoSites, path string) (string, string) {
|
||||
// Forward slashes are used in all examples. Convert if needed.
|
||||
// Issue #1133
|
||||
createpath := filepath.FromSlash(path)
|
||||
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Load()}, flagsToCfg(cd, cfg))
|
||||
if err != nil {
|
||||
return err
|
||||
if h != nil {
|
||||
for _, dir := range h.BaseFs.Content.Dirs {
|
||||
createpath = strings.TrimPrefix(createpath, dir.Meta().Filename())
|
||||
}
|
||||
sourceFs := conf.fs.Source
|
||||
|
||||
err = skeletons.CreateSite(createpath, sourceFs, force, format)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
r.Printf("Congratulations! Your new Hugo site was created in %s.\n\n", createpath)
|
||||
r.Println(c.newSiteNextStepsText(createpath, format))
|
||||
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
if len(args) != 0 {
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp
|
||||
}
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp | cobra.ShellCompDirectiveFilterDirs
|
||||
}
|
||||
cmd.Flags().BoolVarP(&force, "force", "f", false, "init inside non-empty directory")
|
||||
cmd.Flags().StringVar(&format, "format", "toml", "preferred file format (toml, yaml or json)")
|
||||
_ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp))
|
||||
},
|
||||
},
|
||||
&simpleCommand{
|
||||
name: "theme",
|
||||
use: "theme [name]",
|
||||
short: "Create a new theme",
|
||||
long: `Create a new theme with the specified name in the ./themes directory.
|
||||
This generates a functional theme including template examples and sample content.`,
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
if len(args) < 1 {
|
||||
return newUserError("theme name needs to be provided")
|
||||
}
|
||||
cfg := config.New()
|
||||
cfg.Set("publishDir", "public")
|
||||
|
||||
conf, err := r.ConfigFromProvider(configKey{counter: r.configVersionID.Load()}, flagsToCfg(cd, cfg))
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
sourceFs := conf.fs.Source
|
||||
createpath := paths.AbsPathify(conf.configs.Base.WorkingDir, filepath.Join(conf.configs.Base.ThemesDir, args[0]))
|
||||
r.Println("Creating new theme in", createpath)
|
||||
|
||||
err = skeletons.CreateTheme(createpath, sourceFs, format)
|
||||
if err != nil {
|
||||
return err
|
||||
var section string
|
||||
// assume the first directory is the section (kind)
|
||||
if strings.Contains(createpath[1:], helpers.FilePathSeparator) {
|
||||
parts := strings.Split(strings.TrimPrefix(createpath, helpers.FilePathSeparator), helpers.FilePathSeparator)
|
||||
if len(parts) > 0 {
|
||||
section = parts[0]
|
||||
}
|
||||
|
||||
return nil
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.ValidArgsFunction = func(cmd *cobra.Command, args []string, toComplete string) ([]string, cobra.ShellCompDirective) {
|
||||
if len(args) != 0 {
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp
|
||||
}
|
||||
return []string{}, cobra.ShellCompDirectiveNoFileComp | cobra.ShellCompDirectiveFilterDirs
|
||||
}
|
||||
cmd.Flags().StringVar(&format, "format", "toml", "preferred file format (toml, yaml or json)")
|
||||
_ = cmd.RegisterFlagCompletionFunc("format", cobra.FixedCompletions([]string{"toml", "yaml", "json"}, cobra.ShellCompDirectiveNoFileComp))
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
return c
|
||||
}
|
||||
|
||||
type newCommand struct {
|
||||
rootCmd *rootCommand
|
||||
|
||||
commands []simplecobra.Commander
|
||||
}
|
||||
|
||||
func (c *newCommand) Commands() []simplecobra.Commander {
|
||||
return c.commands
|
||||
}
|
||||
|
||||
func (c *newCommand) Name() string {
|
||||
return "new"
|
||||
}
|
||||
|
||||
func (c *newCommand) Run(ctx context.Context, cd *simplecobra.Commandeer, args []string) error {
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *newCommand) Init(cd *simplecobra.Commandeer) error {
|
||||
cmd := cd.CobraCommand
|
||||
cmd.Short = "Create new content"
|
||||
cmd.Long = `Create a new content file and automatically set the date and title.
|
||||
It will guess which kind of file to create based on the path provided.
|
||||
|
||||
You can also specify the kind with ` + "`-k KIND`" + `.
|
||||
|
||||
If archetypes are provided in your theme or site, they will be used.
|
||||
|
||||
Ensure you run this within the root directory of your site.`
|
||||
|
||||
cmd.RunE = nil
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *newCommand) PreRun(cd, runner *simplecobra.Commandeer) error {
|
||||
c.rootCmd = cd.Root.Command.(*rootCommand)
|
||||
return nil
|
||||
}
|
||||
|
||||
func (c *newCommand) newSiteNextStepsText(path string, format string) string {
|
||||
format = strings.ToLower(format)
|
||||
var nextStepsText bytes.Buffer
|
||||
|
||||
nextStepsText.WriteString(`Just a few more steps...
|
||||
|
||||
1. Change the current directory to ` + path + `.
|
||||
2. Create or install a theme:
|
||||
- Create a new theme with the command "hugo new theme <THEMENAME>"
|
||||
- Or, install a theme from https://themes.gohugo.io/
|
||||
3. Edit hugo.` + format + `, setting the "theme" property to the theme name.
|
||||
4. Create new content with the command "hugo new content `)
|
||||
|
||||
nextStepsText.WriteString(filepath.Join("<SECTIONNAME>", "<FILENAME>.<FORMAT>"))
|
||||
|
||||
nextStepsText.WriteString(`".
|
||||
5. Start the embedded web server with the command "hugo server --buildDrafts".
|
||||
|
||||
See documentation at https://gohugo.io/.`)
|
||||
|
||||
return nextStepsText.String()
|
||||
return createpath, section
|
||||
}
|
||||
|
|
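The help text above covers the whole `hugo new` family of commands. For orientation, typical invocations (illustrative examples only, not taken from the diff; the "chapter" kind and the paths are made up) look like:

    hugo new posts/my-first-post.md        # archetype kind guessed from the path
    hugo new --kind chapter docs/intro.md  # force a specific archetype kind (-k KIND)
    hugo new site quickstart               # scaffold a new site
    hugo new theme mytheme                 # scaffold a new theme under ./themes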
134
commands/new_content_test.go
Normal file
|
@ -0,0 +1,134 @@
|
|||
// Copyright 2019 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"path/filepath"
|
||||
"testing"
|
||||
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/spf13/viper"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
)
|
||||
|
||||
// Issue #1133
|
||||
func TestNewContentPathSectionWithForwardSlashes(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
p, s := newContentPathSection(nil, "/post/new.md")
|
||||
c.Assert(p, qt.Equals, filepath.FromSlash("/post/new.md"))
|
||||
c.Assert(s, qt.Equals, "post")
|
||||
}
|
||||
|
||||
func checkNewSiteInited(fs *hugofs.Fs, basepath string, t *testing.T) {
|
||||
c := qt.New(t)
|
||||
paths := []string{
|
||||
filepath.Join(basepath, "layouts"),
|
||||
filepath.Join(basepath, "content"),
|
||||
filepath.Join(basepath, "archetypes"),
|
||||
filepath.Join(basepath, "static"),
|
||||
filepath.Join(basepath, "data"),
|
||||
filepath.Join(basepath, "config.toml"),
|
||||
}
|
||||
|
||||
for _, path := range paths {
|
||||
_, err := fs.Source.Stat(path)
|
||||
c.Assert(err, qt.IsNil)
|
||||
}
|
||||
}
|
||||
|
||||
func TestDoNewSite(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
n := newNewSiteCmd()
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
|
||||
c.Assert(n.doNewSite(fs, basepath, false), qt.IsNil)
|
||||
|
||||
checkNewSiteInited(fs, basepath, t)
|
||||
}
|
||||
|
||||
func TestDoNewSite_noerror_base_exists_but_empty(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
n := newNewSiteCmd()
|
||||
|
||||
c.Assert(fs.Source.MkdirAll(basepath, 0777), qt.IsNil)
|
||||
|
||||
c.Assert(n.doNewSite(fs, basepath, false), qt.IsNil)
|
||||
}
|
||||
|
||||
func TestDoNewSite_error_base_exists(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
n := newNewSiteCmd()
|
||||
|
||||
c.Assert(fs.Source.MkdirAll(basepath, 0777), qt.IsNil)
|
||||
_, err := fs.Source.Create(filepath.Join(basepath, "foo"))
|
||||
c.Assert(err, qt.IsNil)
|
||||
// Since the directory already exists and isn't empty, expect an error
|
||||
c.Assert(n.doNewSite(fs, basepath, false), qt.Not(qt.IsNil))
|
||||
|
||||
}
|
||||
|
||||
func TestDoNewSite_force_empty_dir(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
n := newNewSiteCmd()
|
||||
|
||||
c.Assert(fs.Source.MkdirAll(basepath, 0777), qt.IsNil)
|
||||
c.Assert(n.doNewSite(fs, basepath, true), qt.IsNil)
|
||||
|
||||
checkNewSiteInited(fs, basepath, t)
|
||||
}
|
||||
|
||||
func TestDoNewSite_error_force_dir_inside_exists(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
n := newNewSiteCmd()
|
||||
|
||||
contentPath := filepath.Join(basepath, "content")
|
||||
|
||||
c.Assert(fs.Source.MkdirAll(contentPath, 0777), qt.IsNil)
|
||||
c.Assert(n.doNewSite(fs, basepath, true), qt.Not(qt.IsNil))
|
||||
}
|
||||
|
||||
func TestDoNewSite_error_force_config_inside_exists(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
basepath := filepath.Join("base", "blog")
|
||||
_, fs := newTestCfg()
|
||||
n := newNewSiteCmd()
|
||||
|
||||
configPath := filepath.Join(basepath, "config.toml")
|
||||
c.Assert(fs.Source.MkdirAll(basepath, 0777), qt.IsNil)
|
||||
_, err := fs.Source.Create(configPath)
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
c.Assert(n.doNewSite(fs, basepath, true), qt.Not(qt.IsNil))
|
||||
}
|
||||
|
||||
func newTestCfg() (*viper.Viper, *hugofs.Fs) {
|
||||
|
||||
v := viper.New()
|
||||
fs := hugofs.NewMem(v)
|
||||
|
||||
v.SetFs(fs.Source)
|
||||
|
||||
return v, fs
|
||||
|
||||
}
|
165
commands/new_site.go
Normal file
|
@ -0,0 +1,165 @@
|
|||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
|
||||
"github.com/gohugoio/hugo/parser/metadecoders"
|
||||
|
||||
_errors "github.com/pkg/errors"
|
||||
|
||||
"github.com/gohugoio/hugo/create"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/gohugoio/hugo/parser"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
var _ cmder = (*newSiteCmd)(nil)
|
||||
|
||||
type newSiteCmd struct {
|
||||
configFormat string
|
||||
|
||||
*baseCmd
|
||||
}
|
||||
|
||||
func newNewSiteCmd() *newSiteCmd {
|
||||
ccmd := &newSiteCmd{}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "site [path]",
|
||||
Short: "Create a new site (skeleton)",
|
||||
Long: `Create a new site in the provided directory.
|
||||
The new site will have the correct structure, but no content or theme yet.
|
||||
Use ` + "`hugo new [contentPath]`" + ` to create new content.`,
|
||||
RunE: ccmd.newSite,
|
||||
}
|
||||
|
||||
cmd.Flags().StringVarP(&ccmd.configFormat, "format", "f", "toml", "config & frontmatter format")
|
||||
cmd.Flags().Bool("force", false, "init inside non-empty directory")
|
||||
|
||||
ccmd.baseCmd = newBaseCmd(cmd)
|
||||
|
||||
return ccmd
|
||||
|
||||
}
|
||||
|
||||
func (n *newSiteCmd) doNewSite(fs *hugofs.Fs, basepath string, force bool) error {
|
||||
archeTypePath := filepath.Join(basepath, "archetypes")
|
||||
dirs := []string{
|
||||
filepath.Join(basepath, "layouts"),
|
||||
filepath.Join(basepath, "content"),
|
||||
archeTypePath,
|
||||
filepath.Join(basepath, "static"),
|
||||
filepath.Join(basepath, "data"),
|
||||
filepath.Join(basepath, "themes"),
|
||||
}
|
||||
|
||||
if exists, _ := helpers.Exists(basepath, fs.Source); exists {
|
||||
if isDir, _ := helpers.IsDir(basepath, fs.Source); !isDir {
|
||||
return errors.New(basepath + " already exists but not a directory")
|
||||
}
|
||||
|
||||
isEmpty, _ := helpers.IsEmpty(basepath, fs.Source)
|
||||
|
||||
switch {
|
||||
case !isEmpty && !force:
|
||||
return errors.New(basepath + " already exists and is not empty. See --force.")
|
||||
|
||||
case !isEmpty && force:
|
||||
all := append(dirs, filepath.Join(basepath, "config."+n.configFormat))
|
||||
for _, path := range all {
|
||||
if exists, _ := helpers.Exists(path, fs.Source); exists {
|
||||
return errors.New(path + " already exists")
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
for _, dir := range dirs {
|
||||
if err := fs.Source.MkdirAll(dir, 0777); err != nil {
|
||||
return _errors.Wrap(err, "Failed to create dir")
|
||||
}
|
||||
}
|
||||
|
||||
createConfig(fs, basepath, n.configFormat)
|
||||
|
||||
// Create a default archetype file.
|
||||
helpers.SafeWriteToDisk(filepath.Join(archeTypePath, "default.md"),
|
||||
strings.NewReader(create.ArchetypeTemplateTemplate), fs.Source)
|
||||
|
||||
jww.FEEDBACK.Printf("Congratulations! Your new Hugo site is created in %s.\n\n", basepath)
|
||||
jww.FEEDBACK.Println(nextStepsText())
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
// newSite creates a new Hugo site and initializes a structured Hugo directory.
|
||||
func (n *newSiteCmd) newSite(cmd *cobra.Command, args []string) error {
|
||||
if len(args) < 1 {
|
||||
return newUserError("path needs to be provided")
|
||||
}
|
||||
|
||||
createpath, err := filepath.Abs(filepath.Clean(args[0]))
|
||||
if err != nil {
|
||||
return newUserError(err)
|
||||
}
|
||||
|
||||
forceNew, _ := cmd.Flags().GetBool("force")
|
||||
|
||||
return n.doNewSite(hugofs.NewDefault(viper.New()), createpath, forceNew)
|
||||
}
|
||||
|
||||
func createConfig(fs *hugofs.Fs, inpath string, kind string) (err error) {
|
||||
in := map[string]string{
|
||||
"baseURL": "http://example.org/",
|
||||
"title": "My New Hugo Site",
|
||||
"languageCode": "en-us",
|
||||
}
|
||||
|
||||
var buf bytes.Buffer
|
||||
err = parser.InterfaceToConfig(in, metadecoders.FormatFromString(kind), &buf)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return helpers.WriteToDisk(filepath.Join(inpath, "config."+kind), &buf, fs.Source)
|
||||
}
|
||||
|
||||
func nextStepsText() string {
|
||||
var nextStepsText bytes.Buffer
|
||||
|
||||
nextStepsText.WriteString(`Just a few more steps and you're ready to go:
|
||||
|
||||
1. Download a theme into the same-named folder.
|
||||
Choose a theme from https://themes.gohugo.io/ or
|
||||
create your own with the "hugo new theme <THEMENAME>" command.
|
||||
2. Perhaps you want to add some content. You can add single files
|
||||
with "hugo new `)
|
||||
|
||||
nextStepsText.WriteString(filepath.Join("<SECTIONNAME>", "<FILENAME>.<FORMAT>"))
|
||||
|
||||
nextStepsText.WriteString(`".
|
||||
3. Start the built-in live server via "hugo server".
|
||||
|
||||
Visit https://gohugo.io/ for quickstart guide and full documentation.`)
|
||||
|
||||
return nextStepsText.String()
|
||||
}
|
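As a point of reference for createConfig above: the three settings it writes out serialize, for the default "toml" format, to a config file along these lines (key order may vary; shown here only as an illustration of the generated file, not copied from the diff):

    baseURL = "http://example.org/"
    languageCode = "en-us"
    title = "My New Hugo Site"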
179
commands/new_theme.go
Normal file
|
@ -0,0 +1,179 @@
|
|||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"errors"
|
||||
"path/filepath"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/gohugoio/hugo/hugofs"
|
||||
"github.com/spf13/cobra"
|
||||
jww "github.com/spf13/jwalterweatherman"
|
||||
)
|
||||
|
||||
var _ cmder = (*newThemeCmd)(nil)
|
||||
|
||||
type newThemeCmd struct {
|
||||
*baseCmd
|
||||
hugoBuilderCommon
|
||||
}
|
||||
|
||||
func newNewThemeCmd() *newThemeCmd {
|
||||
ccmd := &newThemeCmd{baseCmd: newBaseCmd(nil)}
|
||||
|
||||
cmd := &cobra.Command{
|
||||
Use: "theme [name]",
|
||||
Short: "Create a new theme",
|
||||
Long: `Create a new theme (skeleton) called [name] in the current directory.
|
||||
New theme is a skeleton. Please add content to the touched files. Add your
|
||||
name to the copyright line in the license and adjust the theme.toml file
|
||||
as you see fit.`,
|
||||
RunE: ccmd.newTheme,
|
||||
}
|
||||
|
||||
ccmd.cmd = cmd
|
||||
|
||||
return ccmd
|
||||
}
|
||||
|
||||
// newTheme creates a new Hugo theme template
|
||||
func (n *newThemeCmd) newTheme(cmd *cobra.Command, args []string) error {
|
||||
c, err := initializeConfig(false, false, &n.hugoBuilderCommon, n, nil)
|
||||
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
if len(args) < 1 {
|
||||
return newUserError("theme name needs to be provided")
|
||||
}
|
||||
|
||||
createpath := c.hugo().PathSpec.AbsPathify(filepath.Join(c.Cfg.GetString("themesDir"), args[0]))
|
||||
jww.FEEDBACK.Println("Creating theme at", createpath)
|
||||
|
||||
cfg := c.DepsCfg
|
||||
|
||||
if x, _ := helpers.Exists(createpath, cfg.Fs.Source); x {
|
||||
return errors.New(createpath + " already exists")
|
||||
}
|
||||
|
||||
mkdir(createpath, "layouts", "_default")
|
||||
mkdir(createpath, "layouts", "partials")
|
||||
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "index.html")
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "404.html")
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "list.html")
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "_default", "single.html")
|
||||
|
||||
baseofDefault := []byte(`<!DOCTYPE html>
|
||||
<html>
|
||||
{{- partial "head.html" . -}}
|
||||
<body>
|
||||
{{- partial "header.html" . -}}
|
||||
<div id="content">
|
||||
{{- block "main" . }}{{- end }}
|
||||
</div>
|
||||
{{- partial "footer.html" . -}}
|
||||
</body>
|
||||
</html>
|
||||
`)
|
||||
err = helpers.WriteToDisk(filepath.Join(createpath, "layouts", "_default", "baseof.html"), bytes.NewReader(baseofDefault), cfg.Fs.Source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "head.html")
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "header.html")
|
||||
touchFile(cfg.Fs.Source, createpath, "layouts", "partials", "footer.html")
|
||||
|
||||
mkdir(createpath, "archetypes")
|
||||
|
||||
archDefault := []byte("+++\n+++\n")
|
||||
|
||||
err = helpers.WriteToDisk(filepath.Join(createpath, "archetypes", "default.md"), bytes.NewReader(archDefault), cfg.Fs.Source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
mkdir(createpath, "static", "js")
|
||||
mkdir(createpath, "static", "css")
|
||||
|
||||
by := []byte(`The MIT License (MIT)
|
||||
|
||||
Copyright (c) ` + time.Now().Format("2006") + ` YOUR_NAME_HERE
|
||||
|
||||
Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||
this software and associated documentation files (the "Software"), to deal in
|
||||
the Software without restriction, including without limitation the rights to
|
||||
use, copy, modify, merge, publish, distribute, sublicense, and/or sell copies of
|
||||
the Software, and to permit persons to whom the Software is furnished to do so,
|
||||
subject to the following conditions:
|
||||
|
||||
The above copyright notice and this permission notice shall be included in all
|
||||
copies or substantial portions of the Software.
|
||||
|
||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
|
||||
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS
|
||||
FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR
|
||||
COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER
|
||||
IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN
|
||||
CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||
`)
|
||||
|
||||
err = helpers.WriteToDisk(filepath.Join(createpath, "LICENSE"), bytes.NewReader(by), cfg.Fs.Source)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
n.createThemeMD(cfg.Fs, createpath)
|
||||
|
||||
return nil
|
||||
}
|
||||
|
||||
func (n *newThemeCmd) createThemeMD(fs *hugofs.Fs, inpath string) (err error) {
|
||||
|
||||
by := []byte(`# theme.toml template for a Hugo theme
|
||||
# See https://github.com/gohugoio/hugoThemes#themetoml for an example
|
||||
|
||||
name = "` + strings.Title(helpers.MakeTitle(filepath.Base(inpath))) + `"
|
||||
license = "MIT"
|
||||
licenselink = "https://github.com/yourname/yourtheme/blob/master/LICENSE"
|
||||
description = ""
|
||||
homepage = "http://example.com/"
|
||||
tags = []
|
||||
features = []
|
||||
min_version = "0.41"
|
||||
|
||||
[author]
|
||||
name = ""
|
||||
homepage = ""
|
||||
|
||||
# If porting an existing theme
|
||||
[original]
|
||||
name = ""
|
||||
homepage = ""
|
||||
repo = ""
|
||||
`)
|
||||
|
||||
err = helpers.WriteToDisk(filepath.Join(inpath, "theme.toml"), bytes.NewReader(by), fs.Source)
|
||||
if err != nil {
|
||||
return
|
||||
}
|
||||
|
||||
return nil
|
||||
}
|
|
@ -1,4 +1,6 @@
|
|||
// Copyright 2024 The Hugo Authors. All rights reserved.
|
||||
// +build release
|
||||
|
||||
// Copyright 2017-present The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
|
@ -14,40 +16,57 @@
|
|||
package commands
|
||||
|
||||
import (
|
||||
"context"
|
||||
"errors"
|
||||
|
||||
"github.com/bep/simplecobra"
|
||||
"github.com/gohugoio/hugo/config"
|
||||
"github.com/gohugoio/hugo/releaser"
|
||||
"github.com/spf13/cobra"
|
||||
)
|
||||
|
||||
var _ cmder = (*releaseCommandeer)(nil)
|
||||
|
||||
type releaseCommandeer struct {
|
||||
cmd *cobra.Command
|
||||
|
||||
version string
|
||||
|
||||
skipPublish bool
|
||||
try bool
|
||||
}
|
||||
|
||||
func createReleaser() cmder {
|
||||
// Note: This is a command only meant for internal use and must be run
|
||||
// via "go run -tags release main.go release" on the actual code base that is in the release.
|
||||
func newReleaseCommand() simplecobra.Commander {
|
||||
var (
|
||||
step int
|
||||
skipPush bool
|
||||
try bool
|
||||
)
|
||||
|
||||
return &simpleCommand{
|
||||
name: "release",
|
||||
short: "Release a new version of Hugo",
|
||||
run: func(ctx context.Context, cd *simplecobra.Commandeer, r *rootCommand, args []string) error {
|
||||
rel, err := releaser.New(skipPush, try, step)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
return rel.Run()
|
||||
},
|
||||
withc: func(cmd *cobra.Command, r *rootCommand) {
|
||||
cmd.Hidden = true
|
||||
cmd.ValidArgsFunction = cobra.NoFileCompletions
|
||||
cmd.PersistentFlags().BoolVarP(&skipPush, "skip-push", "", false, "skip pushing to remote")
|
||||
cmd.PersistentFlags().BoolVarP(&try, "try", "", false, "no changes")
|
||||
cmd.PersistentFlags().IntVarP(&step, "step", "", 0, "step to run (1: set new version 2: prepare next dev version)")
|
||||
_ = cmd.RegisterFlagCompletionFunc("step", cobra.FixedCompletions([]string{"1", "2"}, cobra.ShellCompDirectiveNoFileComp))
|
||||
r := &releaseCommandeer{
|
||||
cmd: &cobra.Command{
|
||||
Use: "release",
|
||||
Short: "Release a new version of Hugo.",
|
||||
Hidden: true,
|
||||
},
|
||||
}
|
||||
|
||||
r.cmd.RunE = func(cmd *cobra.Command, args []string) error {
|
||||
return r.release()
|
||||
}
|
||||
|
||||
r.cmd.PersistentFlags().StringVarP(&r.version, "rel", "r", "", "new release version, i.e. 0.25.1")
|
||||
r.cmd.PersistentFlags().BoolVarP(&r.skipPublish, "skip-publish", "", false, "skip all publishing pipes of the release")
|
||||
r.cmd.PersistentFlags().BoolVarP(&r.try, "try", "", false, "simulate a release, i.e. no changes")
|
||||
|
||||
return r
|
||||
}
|
||||
|
||||
func (c *releaseCommandeer) getCommand() *cobra.Command {
|
||||
return c.cmd
|
||||
}
|
||||
|
||||
func (c *releaseCommandeer) flagsToConfig(cfg config.Provider) {
|
||||
|
||||
}
|
||||
|
||||
func (r *releaseCommandeer) release() error {
|
||||
if r.version == "" {
|
||||
return errors.New("must set the --rel flag to the relevant version number")
|
||||
}
|
||||
return releaser.New(r.version, r.skipPublish, r.try).Run()
|
||||
}
|
||||
|
|
20
commands/release_noop.go
Normal file
|
@ -0,0 +1,20 @@
// +build !release

// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

func createReleaser() cmder {
	return &nilCommand{}
}
|
1454
commands/server.go
File diff suppressed because it is too large
95
commands/server_errors.go
Normal file
|
@ -0,0 +1,95 @@
|
|||
// Copyright 2018 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"io"
|
||||
|
||||
"github.com/gohugoio/hugo/transform"
|
||||
"github.com/gohugoio/hugo/transform/livereloadinject"
|
||||
)
|
||||
|
||||
var buildErrorTemplate = `<!doctype html>
|
||||
<html class="no-js" lang="">
|
||||
<head>
|
||||
<meta charset="utf-8">
|
||||
<title>Hugo Server: Error</title>
|
||||
<style type="text/css">
|
||||
body {
|
||||
font-family: "Muli",avenir, -apple-system, BlinkMacSystemFont, "Segoe UI", Roboto, Helvetica, Arial, sans-serif, "Apple Color Emoji", "Segoe UI Emoji", "Segoe UI Symbol";
|
||||
font-size: 16px;
|
||||
background-color: black;
|
||||
color: rgba(255, 255, 255, 0.9);
|
||||
}
|
||||
main {
|
||||
margin: auto;
|
||||
width: 95%;
|
||||
padding: 1rem;
|
||||
}
|
||||
.version {
|
||||
color: #ccc;
|
||||
padding: 1rem 0;
|
||||
}
|
||||
.stack {
|
||||
margin-top: 6rem;
|
||||
}
|
||||
pre {
|
||||
white-space: pre-wrap;
|
||||
white-space: -moz-pre-wrap;
|
||||
white-space: -pre-wrap;
|
||||
white-space: -o-pre-wrap;
|
||||
word-wrap: break-word;
|
||||
}
|
||||
.highlight {
|
||||
overflow-x: auto;
|
||||
padding: 0.75rem;
|
||||
margin-bottom: 1rem;
|
||||
background-color: #272822;
|
||||
border: 1px solid black;
|
||||
}
|
||||
a {
|
||||
color: #0594cb;
|
||||
text-decoration: none;
|
||||
}
|
||||
a:hover {
|
||||
color: #ccc;
|
||||
}
|
||||
</style>
|
||||
</head>
|
||||
<body>
|
||||
<main>
|
||||
{{ highlight .Error "apl" "noclasses=true,style=monokai" }}
|
||||
{{ with .File }}
|
||||
{{ $params := printf "noclasses=true,style=monokai,linenos=table,hl_lines=%d,linenostart=%d" (add .LinesPos 1) (sub .Position.LineNumber .LinesPos) }}
|
||||
{{ $lexer := .ChromaLexer | default "go-html-template" }}
|
||||
{{ highlight (delimit .Lines "\n") $lexer $params }}
|
||||
{{ end }}
|
||||
{{ with .StackTrace }}
|
||||
{{ highlight . "apl" "noclasses=true,style=monokai" }}
|
||||
{{ end }}
|
||||
<p class="version">{{ .Version }}</p>
|
||||
<a href="">Reload Page</a>
|
||||
</main>
|
||||
</body>
|
||||
</html>
|
||||
`
|
||||
|
||||
func injectLiveReloadScript(src io.Reader, port int) string {
|
||||
var b bytes.Buffer
|
||||
chain := transform.Chain{livereloadinject.New(port)}
|
||||
chain.Apply(&b, src)
|
||||
|
||||
return b.String()
|
||||
}
|
134
commands/server_test.go
Normal file
|
@ -0,0 +1,134 @@
|
|||
// Copyright 2015 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"fmt"
|
||||
"net/http"
|
||||
"os"
|
||||
"runtime"
|
||||
"strings"
|
||||
"testing"
|
||||
"time"
|
||||
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
"github.com/spf13/viper"
|
||||
)
|
||||
|
||||
func TestServer(t *testing.T) {
|
||||
if isWindowsCI() {
|
||||
// TODO(bep) not sure why server tests have started to fail on the Windows CI server.
|
||||
t.Skip("Skip server test on appveyor")
|
||||
}
|
||||
c := qt.New(t)
|
||||
dir, err := createSimpleTestSite(t, testSiteConfig{})
|
||||
c.Assert(err, qt.IsNil)
|
||||
|
||||
// Let us hope that this port is available on all systems ...
|
||||
port := 1331
|
||||
|
||||
defer func() {
|
||||
os.RemoveAll(dir)
|
||||
}()
|
||||
|
||||
stop := make(chan bool)
|
||||
|
||||
b := newCommandsBuilder()
|
||||
scmd := b.newServerCmdSignaled(stop)
|
||||
|
||||
cmd := scmd.getCommand()
|
||||
cmd.SetArgs([]string{"-s=" + dir, fmt.Sprintf("-p=%d", port)})
|
||||
|
||||
go func() {
|
||||
_, err = cmd.ExecuteC()
|
||||
c.Assert(err, qt.IsNil)
|
||||
}()
|
||||
|
||||
// There is no way to know exactly when the server is ready for connections.
|
||||
// We could improve by something like https://golang.org/pkg/net/http/httptest/#Server
|
||||
// But for now, let us sleep and pray!
|
||||
time.Sleep(2 * time.Second)
|
||||
|
||||
resp, err := http.Get("http://localhost:1331/")
|
||||
c.Assert(err, qt.IsNil)
|
||||
defer resp.Body.Close()
|
||||
homeContent := helpers.ReaderToString(resp.Body)
|
||||
|
||||
c.Assert(homeContent, qt.Contains, "List: Hugo Commands")
|
||||
c.Assert(homeContent, qt.Contains, "Environment: development")
|
||||
|
||||
// Stop the server.
|
||||
stop <- true
|
||||
|
||||
}
|
||||
|
||||
func TestFixURL(t *testing.T) {
|
||||
type data struct {
|
||||
TestName string
|
||||
CLIBaseURL string
|
||||
CfgBaseURL string
|
||||
AppendPort bool
|
||||
Port int
|
||||
Result string
|
||||
}
|
||||
tests := []data{
|
||||
{"Basic http localhost", "", "http://foo.com", true, 1313, "http://localhost:1313/"},
|
||||
{"Basic https production, http localhost", "", "https://foo.com", true, 1313, "http://localhost:1313/"},
|
||||
{"Basic subdir", "", "http://foo.com/bar", true, 1313, "http://localhost:1313/bar/"},
|
||||
{"Basic production", "http://foo.com", "http://foo.com", false, 80, "http://foo.com/"},
|
||||
{"Production subdir", "http://foo.com/bar", "http://foo.com/bar", false, 80, "http://foo.com/bar/"},
|
||||
{"No http", "", "foo.com", true, 1313, "//localhost:1313/"},
|
||||
{"Override configured port", "", "foo.com:2020", true, 1313, "//localhost:1313/"},
|
||||
{"No http production", "foo.com", "foo.com", false, 80, "//foo.com/"},
|
||||
{"No http production with port", "foo.com", "foo.com", true, 2020, "//foo.com:2020/"},
|
||||
{"No config", "", "", true, 1313, "//localhost:1313/"},
|
||||
}
|
||||
|
||||
for _, test := range tests {
|
||||
t.Run(test.TestName, func(t *testing.T) {
|
||||
b := newCommandsBuilder()
|
||||
s := b.newServerCmd()
|
||||
v := viper.New()
|
||||
baseURL := test.CLIBaseURL
|
||||
v.Set("baseURL", test.CfgBaseURL)
|
||||
s.serverAppend = test.AppendPort
|
||||
s.serverPort = test.Port
|
||||
result, err := s.fixURL(v, baseURL, s.serverPort)
|
||||
if err != nil {
|
||||
t.Errorf("Unexpected error %s", err)
|
||||
}
|
||||
if result != test.Result {
|
||||
t.Errorf("Expected %q, got %q", test.Result, result)
|
||||
}
|
||||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestRemoveErrorPrefixFromLog(t *testing.T) {
|
||||
c := qt.New(t)
|
||||
content := `ERROR 2018/10/07 13:11:12 Error while rendering "home": template: _default/baseof.html:4:3: executing "main" at <partial "logo" .>: error calling partial: template: partials/logo.html:5:84: executing "partials/logo.html" at <$resized.AHeight>: can't evaluate field AHeight in type *resource.Image
|
||||
ERROR 2018/10/07 13:11:12 Rebuild failed: logged 1 error(s)
|
||||
`
|
||||
|
||||
withoutError := removeErrorPrefixFromLog(content)
|
||||
|
||||
c.Assert(strings.Contains(withoutError, "ERROR"), qt.Equals, false)
|
||||
|
||||
}
|
||||
|
||||
func isWindowsCI() bool {
|
||||
return runtime.GOOS == "windows" && os.Getenv("CI") != ""
|
||||
}
|
132
commands/static_syncer.go
Normal file
|
@ -0,0 +1,132 @@
|
|||
// Copyright 2017 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package commands
|
||||
|
||||
import (
|
||||
"os"
|
||||
"path/filepath"
|
||||
|
||||
"github.com/gohugoio/hugo/hugolib/filesystems"
|
||||
|
||||
"github.com/fsnotify/fsnotify"
|
||||
"github.com/gohugoio/hugo/helpers"
|
||||
"github.com/spf13/fsync"
|
||||
)
|
||||
|
||||
type staticSyncer struct {
|
||||
c *commandeer
|
||||
}
|
||||
|
||||
func newStaticSyncer(c *commandeer) (*staticSyncer, error) {
|
||||
return &staticSyncer{c: c}, nil
|
||||
}
|
||||
|
||||
func (s *staticSyncer) isStatic(filename string) bool {
|
||||
return s.c.hugo().BaseFs.SourceFilesystems.IsStatic(filename)
|
||||
}
|
||||
|
||||
func (s *staticSyncer) syncsStaticEvents(staticEvents []fsnotify.Event) error {
|
||||
c := s.c
|
||||
|
||||
syncFn := func(sourceFs *filesystems.SourceFilesystem) (uint64, error) {
|
||||
publishDir := c.hugo().PathSpec.PublishDir
|
||||
// If root, remove the second '/'
|
||||
if publishDir == "//" {
|
||||
publishDir = helpers.FilePathSeparator
|
||||
}
|
||||
|
||||
if sourceFs.PublishFolder != "" {
|
||||
publishDir = filepath.Join(publishDir, sourceFs.PublishFolder)
|
||||
}
|
||||
|
||||
syncer := fsync.NewSyncer()
|
||||
syncer.NoTimes = c.Cfg.GetBool("noTimes")
|
||||
syncer.NoChmod = c.Cfg.GetBool("noChmod")
|
||||
syncer.ChmodFilter = chmodFilter
|
||||
syncer.SrcFs = sourceFs.Fs
|
||||
syncer.DestFs = c.Fs.Destination
|
||||
|
||||
// prevent spamming the log on changes
|
||||
logger := helpers.NewDistinctFeedbackLogger()
|
||||
|
||||
for _, ev := range staticEvents {
|
||||
// Due to our approach of layering both directories and the content's rendered output
|
||||
// into one we can't accurately remove a file not in one of the source directories.
|
||||
// If a file is in the local static dir and also in the theme static dir and we remove
|
||||
// it from one of those locations we expect it to still exist in the destination
|
||||
//
|
||||
// If Hugo generates a file (from the content dir) over a static file
|
||||
// the content generated file should take precedence.
|
||||
//
|
||||
// Because we are now watching and handling individual events it is possible that a static
|
||||
// event that occupies the same path as a content generated file will take precedence
|
||||
// until a regeneration of the content takes place.
|
||||
//
|
||||
// Hugo assumes that these cases are very rare and will permit this bad behavior
|
||||
// The alternative is to track every single file and which pipeline rendered it
|
||||
// and then to handle conflict resolution on every event.
|
||||
|
||||
fromPath := ev.Name
|
||||
|
||||
relPath := sourceFs.MakePathRelative(fromPath)
|
||||
|
||||
if relPath == "" {
|
||||
// Not member of this virtual host.
|
||||
continue
|
||||
}
|
||||
|
||||
// Remove || rename is harder and will require an assumption.
|
||||
// Hugo takes the following approach:
|
||||
// If the static file exists in any of the static source directories after this event
|
||||
// Hugo will re-sync it.
|
||||
// If it does not exist in all of the static directories Hugo will remove it.
|
||||
//
|
||||
// This assumes that Hugo has not generated content on top of a static file and then removed
|
||||
// the source of that static file. In this case Hugo will incorrectly remove that file
|
||||
// from the published directory.
|
||||
if ev.Op&fsnotify.Rename == fsnotify.Rename || ev.Op&fsnotify.Remove == fsnotify.Remove {
|
||||
if _, err := sourceFs.Fs.Stat(relPath); os.IsNotExist(err) {
|
||||
// If file doesn't exist in any static dir, remove it
|
||||
toRemove := filepath.Join(publishDir, relPath)
|
||||
|
||||
logger.Println("File no longer exists in static dir, removing", toRemove)
|
||||
_ = c.Fs.Destination.RemoveAll(toRemove)
|
||||
} else if err == nil {
|
||||
// If file still exists, sync it
|
||||
logger.Println("Syncing", relPath, "to", publishDir)
|
||||
|
||||
if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
|
||||
c.logger.ERROR.Println(err)
|
||||
}
|
||||
} else {
|
||||
c.logger.ERROR.Println(err)
|
||||
}
|
||||
|
||||
continue
|
||||
}
|
||||
|
||||
// For all other event operations Hugo will sync static.
|
||||
logger.Println("Syncing", relPath, "to", publishDir)
|
||||
if err := syncer.Sync(filepath.Join(publishDir, relPath), relPath); err != nil {
|
||||
c.logger.ERROR.Println(err)
|
||||
}
|
||||
}
|
||||
|
||||
return 0, nil
|
||||
}
|
||||
|
||||
_, err := c.doWithPublishDirs(syncFn)
|
||||
return err
|
||||
|
||||
}
|
44
commands/version.go
Normal file
|
@ -0,0 +1,44 @@
// Copyright 2015 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package commands

import (
	"github.com/gohugoio/hugo/common/hugo"
	"github.com/spf13/cobra"
	jww "github.com/spf13/jwalterweatherman"
)

var _ cmder = (*versionCmd)(nil)

type versionCmd struct {
	*baseCmd
}

func newVersionCmd() *versionCmd {
	return &versionCmd{
		newBaseCmd(&cobra.Command{
			Use:   "version",
			Short: "Print the version number of Hugo",
			Long:  `All software has versions. This is Hugo's.`,
			RunE: func(cmd *cobra.Command, args []string) error {
				printHugoVersion()
				return nil
			},
		}),
	}
}

func printHugoVersion() {
	jww.FEEDBACK.Println(hugo.BuildVersionString())
}
|
|
@ -21,76 +21,41 @@ import (
|
|||
// Append appends from to a slice to and returns the resulting slice.
|
||||
// If length of from is one and the only element is a slice of same type as to,
|
||||
// it will be appended.
|
||||
func Append(to any, from ...any) (any, error) {
|
||||
if len(from) == 0 {
|
||||
return to, nil
|
||||
}
|
||||
func Append(to interface{}, from ...interface{}) (interface{}, error) {
|
||||
tov, toIsNil := indirect(reflect.ValueOf(to))
|
||||
|
||||
toIsNil = toIsNil || to == nil
|
||||
var tot reflect.Type
|
||||
|
||||
if !toIsNil {
|
||||
if tov.Kind() == reflect.Slice {
|
||||
// Create a copy of tov, so we don't modify the original.
|
||||
c := reflect.MakeSlice(tov.Type(), tov.Len(), tov.Len()+len(from))
|
||||
reflect.Copy(c, tov)
|
||||
tov = c
|
||||
}
|
||||
|
||||
if tov.Kind() != reflect.Slice {
|
||||
return nil, fmt.Errorf("expected a slice, got %T", to)
|
||||
}
|
||||
|
||||
tot = tov.Type().Elem()
|
||||
if tot.Kind() == reflect.Slice {
|
||||
totvt := tot.Elem()
|
||||
fromvs := make([]reflect.Value, len(from))
|
||||
for i, f := range from {
|
||||
fromv := reflect.ValueOf(f)
|
||||
fromt := fromv.Type()
|
||||
if fromt.Kind() == reflect.Slice {
|
||||
fromt = fromt.Elem()
|
||||
}
|
||||
if totvt != fromt {
|
||||
return nil, fmt.Errorf("cannot append slice of %s to slice of %s", fromt, totvt)
|
||||
} else {
|
||||
fromvs[i] = fromv
|
||||
}
|
||||
}
|
||||
return reflect.Append(tov, fromvs...).Interface(), nil
|
||||
|
||||
}
|
||||
|
||||
toIsNil = tov.Len() == 0
|
||||
|
||||
if len(from) == 1 {
|
||||
fromv := reflect.ValueOf(from[0])
|
||||
if !fromv.IsValid() {
|
||||
// from[0] is nil
|
||||
return appendToInterfaceSliceFromValues(tov, fromv)
|
||||
}
|
||||
fromt := fromv.Type()
|
||||
if fromt.Kind() == reflect.Slice {
|
||||
fromt = fromt.Elem()
|
||||
}
|
||||
if fromv.Kind() == reflect.Slice {
|
||||
if toIsNil {
|
||||
// If we get nil []string, we just return the []string
|
||||
return from[0], nil
|
||||
}
|
||||
|
||||
fromt := reflect.TypeOf(from[0]).Elem()
|
||||
|
||||
// If we get []string []string, we append the from slice to to
|
||||
if tot == fromt {
|
||||
return reflect.AppendSlice(tov, fromv).Interface(), nil
|
||||
} else if !fromt.AssignableTo(tot) {
|
||||
// Fall back to a []interface{} slice.
|
||||
return appendToInterfaceSliceFromValues(tov, fromv)
|
||||
}
|
||||
|
||||
}
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
if toIsNil {
|
||||
return Slice(from...), nil
|
||||
|
@ -98,7 +63,7 @@ func Append(to any, from ...any) (any, error) {
|
|||
|
||||
for _, f := range from {
|
||||
fv := reflect.ValueOf(f)
|
||||
if !fv.IsValid() || !fv.Type().AssignableTo(tot) {
|
||||
if !fv.Type().AssignableTo(tot) {
|
||||
// Fall back to a []interface{} slice.
|
||||
tov, _ := indirect(reflect.ValueOf(to))
|
||||
return appendToInterfaceSlice(tov, from...)
|
||||
|
@ -109,15 +74,11 @@ func Append(to any, from ...any) (any, error) {
|
|||
return tov.Interface(), nil
|
||||
}
|
||||
|
||||
func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, error) {
|
||||
var tos []any
|
||||
func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]interface{}, error) {
|
||||
var tos []interface{}
|
||||
|
||||
for _, slice := range []reflect.Value{slice1, slice2} {
|
||||
if !slice.IsValid() {
|
||||
tos = append(tos, nil)
|
||||
continue
|
||||
}
|
||||
for i := range slice.Len() {
|
||||
for i := 0; i < slice.Len(); i++ {
|
||||
tos = append(tos, slice.Index(i).Interface())
|
||||
}
|
||||
}
|
||||
|
@ -125,10 +86,10 @@ func appendToInterfaceSliceFromValues(slice1, slice2 reflect.Value) ([]any, erro
|
|||
return tos, nil
|
||||
}
|
||||
|
||||
func appendToInterfaceSlice(tov reflect.Value, from ...any) ([]any, error) {
|
||||
var tos []any
|
||||
func appendToInterfaceSlice(tov reflect.Value, from ...interface{}) ([]interface{}, error) {
|
||||
var tos []interface{}
|
||||
|
||||
for i := range tov.Len() {
|
||||
for i := 0; i < tov.Len(); i++ {
|
||||
tos = append(tos, tov.Index(i).Interface())
|
||||
}
|
||||
|
||||
|
|
|
@ -15,7 +15,6 @@ package collections
|
|||
|
||||
import (
|
||||
"html/template"
|
||||
"reflect"
|
||||
"testing"
|
||||
|
||||
qt "github.com/frankban/quicktest"
|
||||
|
@ -25,60 +24,40 @@ func TestAppend(t *testing.T) {
|
|||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
for i, test := range []struct {
|
||||
start any
|
||||
addend []any
|
||||
expected any
|
||||
for _, test := range []struct {
|
||||
start interface{}
|
||||
addend []interface{}
|
||||
expected interface{}
|
||||
}{
|
||||
{[]string{"a", "b"}, []any{"c"}, []string{"a", "b", "c"}},
|
||||
{[]string{"a", "b"}, []any{"c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
|
||||
{[]string{"a", "b"}, []any{[]string{"c", "d", "e"}}, []string{"a", "b", "c", "d", "e"}},
|
||||
{[]string{"a"}, []any{"b", template.HTML("c")}, []any{"a", "b", template.HTML("c")}},
|
||||
{nil, []any{"a", "b"}, []string{"a", "b"}},
|
||||
{nil, []any{nil}, []any{nil}},
|
||||
{[]any{}, []any{[]string{"c", "d", "e"}}, []string{"c", "d", "e"}},
|
||||
{
|
||||
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
|
||||
[]any{&tstSlicer{"c"}},
|
||||
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}},
|
||||
},
|
||||
{
|
||||
&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
|
||||
[]any{&tstSlicer{"c"}},
|
||||
tstSlicers{
|
||||
&tstSlicer{"a"},
|
||||
{[]string{"a", "b"}, []interface{}{"c"}, []string{"a", "b", "c"}},
|
||||
{[]string{"a", "b"}, []interface{}{"c", "d", "e"}, []string{"a", "b", "c", "d", "e"}},
|
||||
{[]string{"a", "b"}, []interface{}{[]string{"c", "d", "e"}}, []string{"a", "b", "c", "d", "e"}},
|
||||
{[]string{"a"}, []interface{}{"b", template.HTML("c")}, []interface{}{"a", "b", template.HTML("c")}},
|
||||
{nil, []interface{}{"a", "b"}, []string{"a", "b"}},
|
||||
{nil, []interface{}{nil}, []interface{}{nil}},
|
||||
{[]interface{}{}, []interface{}{[]string{"c", "d", "e"}}, []string{"c", "d", "e"}},
|
||||
{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
|
||||
[]interface{}{&tstSlicer{"c"}},
|
||||
tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}, &tstSlicer{"c"}}},
|
||||
{&tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}},
|
||||
[]interface{}{&tstSlicer{"c"}},
|
||||
tstSlicers{&tstSlicer{"a"},
|
||||
&tstSlicer{"b"},
|
||||
&tstSlicer{"c"},
|
||||
},
|
||||
},
|
||||
{
|
||||
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
|
||||
[]any{&tstSlicerIn1{"c"}},
|
||||
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}},
|
||||
},
|
||||
&tstSlicer{"c"}}},
|
||||
{testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}},
|
||||
[]interface{}{&tstSlicerIn1{"c"}},
|
||||
testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn1{"b"}, &tstSlicerIn1{"c"}}},
|
||||
//https://github.com/gohugoio/hugo/issues/5361
|
||||
{
|
||||
[]string{"a", "b"},
|
||||
[]any{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
|
||||
[]any{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}},
|
||||
},
|
||||
{
|
||||
[]string{"a", "b"},
|
||||
[]any{&tstSlicer{"a"}},
|
||||
[]any{"a", "b", &tstSlicer{"a"}},
|
||||
},
|
||||
{[]string{"a", "b"}, []interface{}{tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
|
||||
[]interface{}{"a", "b", &tstSlicer{"a"}, &tstSlicer{"b"}}},
|
||||
{[]string{"a", "b"}, []interface{}{&tstSlicer{"a"}},
|
||||
[]interface{}{"a", "b", &tstSlicer{"a"}}},
|
||||
// Errors
|
||||
{"", []any{[]string{"a", "b"}}, false},
|
||||
{"", []interface{}{[]string{"a", "b"}}, false},
|
||||
// No string concatenation.
|
||||
{
|
||||
"ab",
|
||||
[]any{"c"},
|
||||
false,
|
||||
},
|
||||
{[]string{"a", "b"}, []any{nil}, []any{"a", "b", nil}},
|
||||
{[]string{"a", "b"}, []any{nil, "d", nil}, []any{"a", "b", nil, "d", nil}},
|
||||
{[]any{"a", nil, "c"}, []any{"d", nil, "f"}, []any{"a", nil, "c", "d", nil, "f"}},
|
||||
{[]string{"a", "b"}, []any{}, []string{"a", "b"}},
|
||||
{"ab",
|
||||
[]interface{}{"c"},
|
||||
false},
|
||||
} {
|
||||
|
||||
result, err := Append(test.start, test.addend...)
|
||||
|
@ -89,125 +68,8 @@ func TestAppend(t *testing.T) {
|
|||
continue
|
||||
}
|
||||
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(result, qt.DeepEquals, test.expected, qt.Commentf("test: [%d] %v", i, test))
|
||||
}
|
||||
}
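Outside of Go code, Append is what backs Hugo's append template function, so the cases in the table above map directly to template usage. A hedged sketch of how that looks in a template (illustrative only; the variable names are made up):

    {{ $s := slice "a" "b" }}
    {{ $s = $s | append "c" }}                 {{/* ["a" "b" "c"] */}}
    {{ $s = $s | append (slice "d" "e") }}     {{/* a same-typed slice is flattened in */}}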
|
||||
|
||||
// #11093
|
||||
func TestAppendToMultiDimensionalSlice(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
for _, test := range []struct {
|
||||
to any
|
||||
from []any
|
||||
expected any
|
||||
}{
|
||||
{
|
||||
[][]string{{"a", "b"}},
|
||||
[]any{[]string{"c", "d"}},
|
||||
[][]string{
|
||||
{"a", "b"},
|
||||
{"c", "d"},
|
||||
},
|
||||
},
|
||||
{
|
||||
[][]string{{"a", "b"}},
|
||||
[]any{[]string{"c", "d"}, []string{"e", "f"}},
|
||||
[][]string{
|
||||
{"a", "b"},
|
||||
{"c", "d"},
|
||||
{"e", "f"},
|
||||
},
|
||||
},
|
||||
{
|
||||
[][]string{{"a", "b"}},
|
||||
[]any{[]int{1, 2}},
|
||||
false,
|
||||
},
|
||||
} {
|
||||
result, err := Append(test.to, test.from...)
|
||||
if b, ok := test.expected.(bool); ok && !b {
|
||||
c.Assert(err, qt.Not(qt.IsNil))
|
||||
} else {
|
||||
c.Assert(err, qt.IsNil)
|
||||
c.Assert(result, qt.DeepEquals, test.expected)
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func TestAppendShouldMakeACopyOfTheInputSlice(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
slice := make([]string, 0, 100)
|
||||
slice = append(slice, "a", "b")
|
||||
result, err := Append(slice, "c")
|
||||
c.Assert(err, qt.IsNil)
|
||||
slice[0] = "d"
|
||||
c.Assert(result, qt.DeepEquals, []string{"a", "b", "c"})
|
||||
c.Assert(slice, qt.DeepEquals, []string{"d", "b"})
|
||||
}
|
||||
|
||||
func TestIndirect(t *testing.T) {
|
||||
t.Parallel()
|
||||
c := qt.New(t)
|
||||
|
||||
type testStruct struct {
|
||||
Field string
|
||||
}
|
||||
|
||||
var (
|
||||
nilPtr *testStruct
|
||||
nilIface interface{} = nil
|
||||
nonNilIface interface{} = &testStruct{Field: "hello"}
|
||||
)
|
||||
|
||||
tests := []struct {
|
||||
name string
|
||||
input any
|
||||
wantKind reflect.Kind
|
||||
wantNil bool
|
||||
}{
|
||||
{
|
||||
name: "nil pointer",
|
||||
input: nilPtr,
|
||||
wantKind: reflect.Ptr,
|
||||
wantNil: true,
|
||||
},
|
||||
{
|
||||
name: "nil interface",
|
||||
input: nilIface,
|
||||
wantKind: reflect.Invalid,
|
||||
wantNil: false,
|
||||
},
|
||||
{
|
||||
name: "non-nil pointer to struct",
|
||||
input: &testStruct{Field: "abc"},
|
||||
wantKind: reflect.Struct,
|
||||
wantNil: false,
|
||||
},
|
||||
{
|
||||
name: "non-nil interface holding pointer",
|
||||
input: nonNilIface,
|
||||
wantKind: reflect.Struct,
|
||||
wantNil: false,
|
||||
},
|
||||
{
|
||||
name: "plain value",
|
||||
input: testStruct{Field: "xyz"},
|
||||
wantKind: reflect.Struct,
|
||||
wantNil: false,
|
||||
},
|
||||
}
|
||||
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.name, func(t *testing.T) {
|
||||
v := reflect.ValueOf(tt.input)
|
||||
got, isNil := indirect(v)
|
||||
|
||||
c.Assert(got.Kind(), qt.Equals, tt.wantKind)
|
||||
c.Assert(isNil, qt.Equals, tt.wantNil)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@ -17,5 +17,5 @@ package collections
|
|||
|
||||
// Grouper defines a very generic way to group items by a given key.
|
||||
type Grouper interface {
|
||||
Group(key any, items any) (any, error)
|
||||
Group(key interface{}, items interface{}) (interface{}, error)
|
||||
}
|
||||
|
|
|
@ -1,20 +0,0 @@
|
|||
// Copyright 2020 The Hugo Authors. All rights reserved.
|
||||
//
|
||||
// Licensed under the Apache License, Version 2.0 (the "License");
|
||||
// you may not use this file except in compliance with the License.
|
||||
// You may obtain a copy of the License at
|
||||
// http://www.apache.org/licenses/LICENSE-2.0
|
||||
//
|
||||
// Unless required by applicable law or agreed to in writing, software
|
||||
// distributed under the License is distributed on an "AS IS" BASIS,
|
||||
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||
// See the License for the specific language governing permissions and
|
||||
// limitations under the License.
|
||||
|
||||
package collections
|
||||
|
||||
type Order interface {
|
||||
// Ordinal is a zero-based ordinal that represents the order of an object
|
||||
// in a collection.
|
||||
Ordinal() int
|
||||
}
|
|
@@ -15,18 +15,17 @@ package collections

import (
	"reflect"
	"sort"
)

// Slicer defines a very generic way to create a typed slice. This is used
// in collections.Slice template func to get types such as Pages, PageGroups etc.
// instead of the less useful []interface{}.
type Slicer interface {
	Slice(items any) (any, error)
	Slice(items interface{}) (interface{}, error)
}

// Slice returns a slice of all passed arguments.
func Slice(args ...any) any {
func Slice(args ...interface{}) interface{} {
	if len(args) == 0 {
		return args
	}
@@ -65,31 +64,3 @@ func Slice(args ...any) any {
	}
	return slice.Interface()
}

// StringSliceToInterfaceSlice converts ss to []interface{}.
func StringSliceToInterfaceSlice(ss []string) []any {
	result := make([]any, len(ss))
	for i, s := range ss {
		result[i] = s
	}
	return result
}

type SortedStringSlice []string

// Contains returns true if s is in ss.
func (ss SortedStringSlice) Contains(s string) bool {
	i := sort.SearchStrings(ss, s)
	return i < len(ss) && ss[i] == s
}

// Count returns the number of times s is in ss.
func (ss SortedStringSlice) Count(s string) int {
	var count int
	i := sort.SearchStrings(ss, s)
	for i < len(ss) && ss[i] == s {
		count++
		i++
	}
	return count
}
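As a quick orientation (hypothetical caller code written for this note, not part of the diff): Slice returns a typed slice when every argument implements Slicer, and SortedStringSlice relies on its input already being sorted, since Contains and Count use binary search:

pages := Slice(&tstSlicer{"a"}, &tstSlicer{"b"}) // tstSlicers, not []interface{}
mixed := Slice(1, "b")                           // mixed types fall back to []interface{}

ss := SortedStringSlice{"a", "b", "b", "c"} // must be pre-sorted for sort.SearchStrings
ss.Contains("b")                            // true
ss.Count("b")                               // 2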
@@ -20,13 +20,11 @@ import (
	qt "github.com/frankban/quicktest"
)

var (
	_ Slicer              = (*tstSlicer)(nil)
	_ Slicer              = (*tstSlicerIn1)(nil)
	_ Slicer              = (*tstSlicerIn2)(nil)
	_ testSlicerInterface = (*tstSlicerIn1)(nil)
	_ testSlicerInterface = (*tstSlicerIn1)(nil)
)
var _ Slicer = (*tstSlicer)(nil)
var _ Slicer = (*tstSlicerIn1)(nil)
var _ Slicer = (*tstSlicerIn2)(nil)
var _ testSlicerInterface = (*tstSlicerIn1)(nil)
var _ testSlicerInterface = (*tstSlicerIn1)(nil)

type testSlicerInterface interface {
	Name() string

@@ -46,8 +44,8 @@ type tstSlicer struct {
	TheName string
}

func (p *tstSlicerIn1) Slice(in any) (any, error) {
	items := in.([]any)
func (p *tstSlicerIn1) Slice(in interface{}) (interface{}, error) {
	items := in.([]interface{})
	result := make(testSlicerInterfaces, len(items))
	for i, v := range items {
		switch vv := v.(type) {

@@ -56,12 +54,13 @@ func (p *tstSlicerIn1) Slice(in any) (any, error) {
		default:
			return nil, errors.New("invalid type")
		}

	}
	return result, nil
}

func (p *tstSlicerIn2) Slice(in any) (any, error) {
	items := in.([]any)
func (p *tstSlicerIn2) Slice(in interface{}) (interface{}, error) {
	items := in.([]interface{})
	result := make(testSlicerInterfaces, len(items))
	for i, v := range items {
		switch vv := v.(type) {

@@ -82,8 +81,8 @@ func (p *tstSlicerIn2) Name() string {
	return p.TheName
}

func (p *tstSlicer) Slice(in any) (any, error) {
	items := in.([]any)
func (p *tstSlicer) Slice(in interface{}) (interface{}, error) {
	items := in.([]interface{})
	result := make(tstSlicers, len(items))
	for i, v := range items {
		switch vv := v.(type) {

@@ -103,17 +102,17 @@ func TestSlice(t *testing.T) {
	c := qt.New(t)

	for i, test := range []struct {
		args     []any
		expected any
		args     []interface{}
		expected interface{}
	}{
		{[]any{"a", "b"}, []string{"a", "b"}},
		{[]any{&tstSlicer{"a"}, &tstSlicer{"b"}}, tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
		{[]any{&tstSlicer{"a"}, "b"}, []any{&tstSlicer{"a"}, "b"}},
		{[]any{}, []any{}},
		{[]any{nil}, []any{nil}},
		{[]any{5, "b"}, []any{5, "b"}},
		{[]any{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}, testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}},
		{[]any{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}, []any{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}},
		{[]interface{}{"a", "b"}, []string{"a", "b"}},
		{[]interface{}{&tstSlicer{"a"}, &tstSlicer{"b"}}, tstSlicers{&tstSlicer{"a"}, &tstSlicer{"b"}}},
		{[]interface{}{&tstSlicer{"a"}, "b"}, []interface{}{&tstSlicer{"a"}, "b"}},
		{[]interface{}{}, []interface{}{}},
		{[]interface{}{nil}, []interface{}{nil}},
		{[]interface{}{5, "b"}, []interface{}{5, "b"}},
		{[]interface{}{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}, testSlicerInterfaces{&tstSlicerIn1{"a"}, &tstSlicerIn2{"b"}}},
		{[]interface{}{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}, []interface{}{&tstSlicerIn1{"a"}, &tstSlicer{"b"}}},
	} {
		errMsg := qt.Commentf("[%d] %v", i, test.args)

@@ -121,52 +120,5 @@ func TestSlice(t *testing.T) {

		c.Assert(test.expected, qt.DeepEquals, result, errMsg)
	}
}

func TestSortedStringSlice(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	var s SortedStringSlice = []string{"a", "b", "b", "b", "c", "d"}

	c.Assert(s.Contains("a"), qt.IsTrue)
	c.Assert(s.Contains("b"), qt.IsTrue)
	c.Assert(s.Contains("z"), qt.IsFalse)
	c.Assert(s.Count("b"), qt.Equals, 3)
	c.Assert(s.Count("z"), qt.Equals, 0)
	c.Assert(s.Count("a"), qt.Equals, 1)
}

func TestStringSliceToInterfaceSlice(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	tests := []struct {
		name string
		in   []string
		want []any
	}{
		{
			name: "empty slice",
			in:   []string{},
			want: []any{},
		},
		{
			name: "single element",
			in:   []string{"hello"},
			want: []any{"hello"},
		},
		{
			name: "multiple elements",
			in:   []string{"a", "b", "c"},
			want: []any{"a", "b", "c"},
		},
	}

	for _, tt := range tests {
		t.Run(tt.name, func(t *testing.T) {
			got := StringSliceToInterfaceSlice(tt.in)
			c.Assert(got, qt.DeepEquals, tt.want)
		})
	}
}
@@ -1,82 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package collections

import "slices"

import "sync"

// Stack is a simple LIFO stack that is safe for concurrent use.
type Stack[T any] struct {
	items []T
	zero  T
	mu    sync.RWMutex
}

func NewStack[T any]() *Stack[T] {
	return &Stack[T]{}
}

func (s *Stack[T]) Push(item T) {
	s.mu.Lock()
	defer s.mu.Unlock()
	s.items = append(s.items, item)
}

func (s *Stack[T]) Pop() (T, bool) {
	s.mu.Lock()
	defer s.mu.Unlock()
	if len(s.items) == 0 {
		return s.zero, false
	}
	item := s.items[len(s.items)-1]
	s.items = s.items[:len(s.items)-1]
	return item, true
}

func (s *Stack[T]) Peek() (T, bool) {
	s.mu.RLock()
	defer s.mu.RUnlock()
	if len(s.items) == 0 {
		return s.zero, false
	}
	return s.items[len(s.items)-1], true
}

func (s *Stack[T]) Len() int {
	s.mu.RLock()
	defer s.mu.RUnlock()
	return len(s.items)
}

func (s *Stack[T]) Drain() []T {
	s.mu.Lock()
	defer s.mu.Unlock()
	items := s.items
	s.items = nil
	return items
}

func (s *Stack[T]) DrainMatching(predicate func(T) bool) []T {
	s.mu.Lock()
	defer s.mu.Unlock()
	var items []T
	for i := len(s.items) - 1; i >= 0; i-- {
		if predicate(s.items[i]) {
			items = append(items, s.items[i])
			s.items = slices.Delete(s.items, i, i+1)
		}
	}
	return items
}
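A brief usage sketch of the generic Stack above (caller code written for illustration, not taken from the diff). Note that DrainMatching scans from the top of the stack, so matches come back in LIFO order:

s := NewStack[int]()
s.Push(1)
s.Push(2)
s.Push(3)
top, _ := s.Peek()                                              // 3, stack unchanged
evens := s.DrainMatching(func(v int) bool { return v%2 == 0 })  // [2]
rest := s.Drain()                                               // [1 3], stack now empty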
@@ -1,77 +0,0 @@
package collections

import (
	"testing"

	qt "github.com/frankban/quicktest"
)

func TestNewStack(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	s := NewStack[int]()

	c.Assert(s, qt.IsNotNil)
}

func TestStackBasic(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	s := NewStack[int]()

	c.Assert(s.Len(), qt.Equals, 0)

	s.Push(1)
	s.Push(2)
	s.Push(3)

	c.Assert(s.Len(), qt.Equals, 3)

	top, ok := s.Peek()
	c.Assert(ok, qt.Equals, true)
	c.Assert(top, qt.Equals, 3)

	popped, ok := s.Pop()
	c.Assert(ok, qt.Equals, true)
	c.Assert(popped, qt.Equals, 3)

	c.Assert(s.Len(), qt.Equals, 2)

	_, _ = s.Pop()
	_, _ = s.Pop()
	_, ok = s.Pop()

	c.Assert(ok, qt.Equals, false)
}

func TestStackDrain(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	s := NewStack[string]()
	s.Push("a")
	s.Push("b")

	got := s.Drain()

	c.Assert(got, qt.DeepEquals, []string{"a", "b"})
	c.Assert(s.Len(), qt.Equals, 0)
}

func TestStackDrainMatching(t *testing.T) {
	t.Parallel()
	c := qt.New(t)

	s := NewStack[int]()
	s.Push(1)
	s.Push(2)
	s.Push(3)
	s.Push(4)

	got := s.DrainMatching(func(v int) bool { return v%2 == 0 })

	c.Assert(got, qt.DeepEquals, []int{4, 2})
	c.Assert(s.Drain(), qt.DeepEquals, []int{1, 3})
}
@@ -1,49 +0,0 @@
// Copyright 2020 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package constants

// Error/Warning IDs.
// Do not change these values.
const (
	// IDs for remote errors in tpl/data.
	ErrRemoteGetJSON = "error-remote-getjson"
	ErrRemoteGetCSV  = "error-remote-getcsv"

	WarnFrontMatterParamsOverrides = "warning-frontmatter-params-overrides"
	WarnRenderShortcodesInHTML     = "warning-rendershortcodes-in-html"
	WarnGoldmarkRawHTML            = "warning-goldmark-raw-html"
	WarnPartialSuperfluousPrefix   = "warning-partial-superfluous-prefix"
	WarnHomePageIsLeafBundle       = "warning-home-page-is-leaf-bundle"
)

// Field/method names with special meaning.
const (
	FieldRelPermalink = "RelPermalink"
	FieldPermalink    = "Permalink"
)

// IsFieldRelOrPermalink returns whether the given name is a RelPermalink or Permalink.
func IsFieldRelOrPermalink(name string) bool {
	return name == FieldRelPermalink || name == FieldPermalink
}

// Resource transformations.
const (
	ResourceTransformationFingerprint = "fingerprint"
)

// IsResourceTransformationPermalinkHash returns whether the given name is a resource transformation that changes the permalink based on the content.
func IsResourceTransformationPermalinkHash(name string) bool {
	return name == ResourceTransformationFingerprint
}
@@ -1,2 +0,0 @@
// Package common provides common helper functionality for Hugo.
package common
@@ -1,194 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

// Package hashing provides common hashing utilities.
package hashing

import (
	"crypto/md5"
	"encoding/hex"
	"io"
	"strconv"
	"sync"

	"github.com/cespare/xxhash/v2"
	"github.com/gohugoio/hashstructure"
	"github.com/gohugoio/hugo/identity"
)

// XXHashFromReader calculates the xxHash for the given reader.
func XXHashFromReader(r io.Reader) (uint64, int64, error) {
	h := getXxHashReadFrom()
	defer putXxHashReadFrom(h)

	size, err := io.Copy(h, r)
	if err != nil {
		return 0, 0, err
	}
	return h.Sum64(), size, nil
}

// XxHashFromReaderHexEncoded calculates the xxHash for the given reader
// and returns the hash as a hex encoded string.
func XxHashFromReaderHexEncoded(r io.Reader) (string, error) {
	h := getXxHashReadFrom()
	defer putXxHashReadFrom(h)
	_, err := io.Copy(h, r)
	if err != nil {
		return "", err
	}
	hash := h.Sum(nil)
	return hex.EncodeToString(hash), nil
}

// XXHashFromString calculates the xxHash for the given string.
func XXHashFromString(s string) (uint64, error) {
	h := xxhash.New()
	h.WriteString(s)
	return h.Sum64(), nil
}

// XxHashFromStringHexEncoded calculates the xxHash for the given string
// and returns the hash as a hex encoded string.
func XxHashFromStringHexEncoded(f string) string {
	h := xxhash.New()
	h.WriteString(f)
	hash := h.Sum(nil)
	return hex.EncodeToString(hash)
}

// MD5FromStringHexEncoded returns the MD5 hash of the given string.
func MD5FromStringHexEncoded(f string) string {
	h := md5.New()
	h.Write([]byte(f))
	return hex.EncodeToString(h.Sum(nil))
}

// HashString returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection as
// it in the more complex values (e.g. Page) will not hash the full content.
func HashString(vs ...any) string {
	hash := HashUint64(vs...)
	return strconv.FormatUint(hash, 10)
}

// HashStringHex returns a hash from the given elements as a hex encoded string.
// See HashString for more information.
func HashStringHex(vs ...any) string {
	hash := HashUint64(vs...)
	return strconv.FormatUint(hash, 16)
}

var hashOptsPool = sync.Pool{
	New: func() any {
		return &hashstructure.HashOptions{
			Hasher: xxhash.New(),
		}
	},
}

func getHashOpts() *hashstructure.HashOptions {
	return hashOptsPool.Get().(*hashstructure.HashOptions)
}

func putHashOpts(opts *hashstructure.HashOptions) {
	opts.Hasher.Reset()
	hashOptsPool.Put(opts)
}

// HashUint64 returns a hash from the given elements.
// It will panic if the hash cannot be calculated.
// Note that this hash should be used primarily for identity, not for change detection as
// it in the more complex values (e.g. Page) will not hash the full content.
func HashUint64(vs ...any) uint64 {
	var o any
	if len(vs) == 1 {
		o = toHashable(vs[0])
	} else {
		elements := make([]any, len(vs))
		for i, e := range vs {
			elements[i] = toHashable(e)
		}
		o = elements
	}

	hash, err := Hash(o)
	if err != nil {
		panic(err)
	}
	return hash
}

// Hash returns a hash from vs.
func Hash(vs ...any) (uint64, error) {
	hashOpts := getHashOpts()
	defer putHashOpts(hashOpts)
	var v any = vs
	if len(vs) == 1 {
		v = vs[0]
	}
	return hashstructure.Hash(v, hashOpts)
}

type keyer interface {
	Key() string
}

// For structs, hashstructure.Hash only works on the exported fields,
// so rewrite the input slice for known identity types.
func toHashable(v any) any {
	switch t := v.(type) {
	case keyer:
		return t.Key()
	case identity.IdentityProvider:
		return t.GetIdentity()
	default:
		return v
	}
}

type xxhashReadFrom struct {
	buff []byte
	*xxhash.Digest
}

func (x *xxhashReadFrom) ReadFrom(r io.Reader) (int64, error) {
	for {
		n, err := r.Read(x.buff)
		if n > 0 {
			x.Digest.Write(x.buff[:n])
		}
		if err != nil {
			if err == io.EOF {
				err = nil
			}
			return int64(n), err
		}
	}
}

var xXhashReadFromPool = sync.Pool{
	New: func() any {
		return &xxhashReadFrom{Digest: xxhash.New(), buff: make([]byte, 48*1024)}
	},
}

func getXxHashReadFrom() *xxhashReadFrom {
	return xXhashReadFromPool.Get().(*xxhashReadFrom)
}

func putXxHashReadFrom(h *xxhashReadFrom) {
	h.Reset()
	xXhashReadFromPool.Put(h)
}
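A short, illustrative use of the helpers above. The literal hash values are taken from the tests later in this diff; everything else is hypothetical caller code:

sum, _ := XXHashFromString("Hello World")                                            // 7148569436472236994
hexSum := XxHashFromStringHexEncoded("The quick brown fox jumps over the lazy dog")  // "0b242d361fda71bc"
key := HashString("a", "b")                                                          // "3176555414984061461", stable across runs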
@@ -1,157 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hashing

import (
	"fmt"
	"math"
	"strings"
	"sync"
	"testing"

	qt "github.com/frankban/quicktest"
)

func TestXxHashFromReader(t *testing.T) {
	c := qt.New(t)
	s := "Hello World"
	r := strings.NewReader(s)
	got, size, err := XXHashFromReader(r)
	c.Assert(err, qt.IsNil)
	c.Assert(size, qt.Equals, int64(len(s)))
	c.Assert(got, qt.Equals, uint64(7148569436472236994))
}

func TestXxHashFromReaderPara(t *testing.T) {
	c := qt.New(t)

	var wg sync.WaitGroup
	for i := range 10 {
		i := i
		wg.Add(1)
		go func() {
			defer wg.Done()
			for j := range 100 {
				s := strings.Repeat("Hello ", i+j+1*42)
				r := strings.NewReader(s)
				got, size, err := XXHashFromReader(r)
				c.Assert(size, qt.Equals, int64(len(s)))
				c.Assert(err, qt.IsNil)
				expect, _ := XXHashFromString(s)
				c.Assert(got, qt.Equals, expect)
			}
		}()
	}

	wg.Wait()
}

func TestXxHashFromString(t *testing.T) {
	c := qt.New(t)
	s := "Hello World"
	got, err := XXHashFromString(s)
	c.Assert(err, qt.IsNil)
	c.Assert(got, qt.Equals, uint64(7148569436472236994))
}

func TestXxHashFromStringHexEncoded(t *testing.T) {
	c := qt.New(t)
	s := "The quick brown fox jumps over the lazy dog"
	got := XxHashFromStringHexEncoded(s)
	// Facit: https://asecuritysite.com/encryption/xxhash?val=The%20quick%20brown%20fox%20jumps%20over%20the%20lazy%20dog
	c.Assert(got, qt.Equals, "0b242d361fda71bc")
}

func BenchmarkXXHashFromReader(b *testing.B) {
	r := strings.NewReader("Hello World")
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		XXHashFromReader(r)
		r.Seek(0, 0)
	}
}

func BenchmarkXXHashFromString(b *testing.B) {
	s := "Hello World"
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		XXHashFromString(s)
	}
}

func BenchmarkXXHashFromStringHexEncoded(b *testing.B) {
	s := "The quick brown fox jumps over the lazy dog"
	b.ResetTimer()
	for i := 0; i < b.N; i++ {
		XxHashFromStringHexEncoded(s)
	}
}

func TestHashString(t *testing.T) {
	c := qt.New(t)

	c.Assert(HashString("a", "b"), qt.Equals, "3176555414984061461")
	c.Assert(HashString("ab"), qt.Equals, "7347350983217793633")

	var vals []any = []any{"a", "b", tstKeyer{"c"}}

	c.Assert(HashString(vals...), qt.Equals, "4438730547989914315")
	c.Assert(vals[2], qt.Equals, tstKeyer{"c"})
}

type tstKeyer struct {
	key string
}

func (t tstKeyer) Key() string {
	return t.key
}

func (t tstKeyer) String() string {
	return "key: " + t.key
}

func BenchmarkHashString(b *testing.B) {
	word := " hello "

	var tests []string

	for i := 1; i <= 5; i++ {
		sentence := strings.Repeat(word, int(math.Pow(4, float64(i))))
		tests = append(tests, sentence)
	}

	b.ResetTimer()

	for _, test := range tests {
		b.Run(fmt.Sprintf("n%d", len(test)), func(b *testing.B) {
			for i := 0; i < b.N; i++ {
				HashString(test)
			}
		})
	}
}

func BenchmarkHashMap(b *testing.B) {
	m := map[string]any{}
	for i := range 1000 {
		m[fmt.Sprintf("key%d", i)] = i
	}

	b.ResetTimer()

	for i := 0; i < b.N; i++ {
		HashString(m)
	}
}
@@ -1,46 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package hcontext

import "context"

// ContextDispatcher is a generic interface for setting and getting values from a context.
type ContextDispatcher[T any] interface {
	Set(ctx context.Context, value T) context.Context
	Get(ctx context.Context) T
}

// NewContextDispatcher creates a new ContextDispatcher with the given key.
func NewContextDispatcher[T any, R comparable](key R) ContextDispatcher[T] {
	return keyInContext[T, R]{
		id: key,
	}
}

type keyInContext[T any, R comparable] struct {
	zero T
	id   R
}

func (f keyInContext[T, R]) Get(ctx context.Context) T {
	v := ctx.Value(f.id)
	if v == nil {
		return f.zero
	}
	return v.(T)
}

func (f keyInContext[T, R]) Set(ctx context.Context, value T) context.Context {
	return context.WithValue(ctx, f.id, value)
}
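A minimal usage sketch for ContextDispatcher (the key type and values here are made up for illustration): it wraps context.WithValue with typed Set/Get, returning the zero value when nothing is stored under the key.

type ctxKey string

dispatcher := NewContextDispatcher[string](ctxKey("request-id"))

ctx := dispatcher.Set(context.Background(), "abc123")
id := dispatcher.Get(ctx)                       // "abc123"
missing := dispatcher.Get(context.Background()) // "" (zero value)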
@@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -11,15 +11,18 @@
// See the License for the specific language governing permissions and
// limitations under the License.

// Package herrors contains common Hugo errors and error related utilities.
// Package errors contains common Hugo errors and error related utilities.
package herrors

import (
	"io"
	"io/ioutil"
	"path/filepath"
	"strings"

	"github.com/gohugoio/hugo/common/text"

	"github.com/spf13/afero"
)

// LineMatcher contains the elements used to match an error to a line
@@ -33,47 +36,19 @@ type LineMatcher struct {
}

// LineMatcherFn is used to match a line with an error.
// It returns the column number or 0 if the line was found, but column could not be determined. Returns -1 if no line match.
type LineMatcherFn func(m LineMatcher) int
type LineMatcherFn func(m LineMatcher) bool

// SimpleLineMatcher simply matches by line number.
var SimpleLineMatcher = func(m LineMatcher) int {
	if m.Position.LineNumber == m.LineNumber {
		// We found the line, but don't know the column.
		return 0
	}
	return -1
var SimpleLineMatcher = func(m LineMatcher) bool {
	return m.Position.LineNumber == m.LineNumber
}

// NopLineMatcher is a matcher that always returns 1.
// This will effectively give line 1, column 1.
var NopLineMatcher = func(m LineMatcher) int {
	return 1
}

// OffsetMatcher is a line matcher that matches by offset.
var OffsetMatcher = func(m LineMatcher) int {
	if m.Offset+len(m.Line) >= m.Position.Offset {
		// We found the line, but return 0 to signal that we want to determine
		// the column from the error.
		return 0
	}
	return -1
}

// ContainsMatcher is a line matcher that matches by line content.
func ContainsMatcher(text string) func(m LineMatcher) int {
	return func(m LineMatcher) int {
		if idx := strings.Index(m.Line, text); idx != -1 {
			return idx + 1
		}
		return -1
	}
}
var _ text.Positioner = ErrorContext{}

// ErrorContext contains contextual information about an error. This will
// typically be the lines surrounding some problem in a file.
type ErrorContext struct {

	// If a match will contain the matched line and up to 2 lines before and after.
	// Will be empty if no match.
	Lines []string
@@ -81,15 +56,114 @@ type ErrorContext struct {
	// The position of the error in the Lines above. 0 based.
	LinesPos int

	// The position of the content in the file. Note that this may be different from the error's position set
	// in FileError.
	Position text.Position
	position text.Position

	// The lexer to use for syntax highlighting.
	// https://gohugo.io/content-management/syntax-highlighting/#list-of-chroma-highlighting-languages
	ChromaLexer string
}

// Position returns the text position of this error.
func (e ErrorContext) Position() text.Position {
	return e.position
}

var _ causer = (*ErrorWithFileContext)(nil)

// ErrorWithFileContext is an error with some additional file context related
// to that error.
type ErrorWithFileContext struct {
	cause error
	ErrorContext
}

func (e *ErrorWithFileContext) Error() string {
	pos := e.Position()
	if pos.IsValid() {
		return pos.String() + ": " + e.cause.Error()
	}
	return e.cause.Error()
}

func (e *ErrorWithFileContext) Cause() error {
	return e.cause
}

// WithFileContextForFile will try to add a file context with lines matching the given matcher.
// If no match could be found, the original error is returned with false as the second return value.
func WithFileContextForFile(e error, realFilename, filename string, fs afero.Fs, matcher LineMatcherFn) (error, bool) {
	f, err := fs.Open(filename)
	if err != nil {
		return e, false
	}
	defer f.Close()
	return WithFileContext(e, realFilename, f, matcher)
}

// WithFileContextForFile will try to add a file context with lines matching the given matcher.
// If no match could be found, the original error is returned with false as the second return value.
func WithFileContext(e error, realFilename string, r io.Reader, matcher LineMatcherFn) (error, bool) {
	if e == nil {
		panic("error missing")
	}
	le := UnwrapFileError(e)

	if le == nil {
		var ok bool
		if le, ok = ToFileError("", e).(FileError); !ok {
			return e, false
		}
	}

	var errCtx ErrorContext

	posle := le.Position()

	if posle.Offset != -1 {
		errCtx = locateError(r, le, func(m LineMatcher) bool {
			if posle.Offset >= m.Offset && posle.Offset < m.Offset+len(m.Line) {
				lno := posle.LineNumber - m.Position.LineNumber + m.LineNumber
				m.Position = text.Position{LineNumber: lno}
			}
			return matcher(m)
		})
	} else {
		errCtx = locateError(r, le, matcher)
	}

	pos := &errCtx.position

	if pos.LineNumber == -1 {
		return e, false
	}

	pos.Filename = realFilename

	if le.Type() != "" {
		errCtx.ChromaLexer = chromaLexerFromType(le.Type())
	} else {
		errCtx.ChromaLexer = chromaLexerFromFilename(realFilename)
	}

	return &ErrorWithFileContext{cause: e, ErrorContext: errCtx}, true
}

// UnwrapErrorWithFileContext tries to unwrap an ErrorWithFileContext from err.
// It returns nil if this is not possible.
func UnwrapErrorWithFileContext(err error) *ErrorWithFileContext {
	for err != nil {
		switch v := err.(type) {
		case *ErrorWithFileContext:
			return v
		case causer:
			err = v.Cause()
		default:
			return nil
		}
	}
	return nil
}

func chromaLexerFromType(fileType string) string {
	switch fileType {
	case "html", "htm":
@@ -111,24 +185,31 @@ func chromaLexerFromFilename(filename string) string {
	return chromaLexerFromType(ext)
}

func locateErrorInString(src string, matcher LineMatcherFn) *ErrorContext {
func locateErrorInString(src string, matcher LineMatcherFn) ErrorContext {
	return locateError(strings.NewReader(src), &fileError{}, matcher)
}

func locateError(r io.Reader, le FileError, matches LineMatcherFn) *ErrorContext {
func locateError(r io.Reader, le FileError, matches LineMatcherFn) ErrorContext {
	if le == nil {
		panic("must provide an error")
	}

	ectx := &ErrorContext{LinesPos: -1, Position: text.Position{Offset: -1}}
	errCtx := ErrorContext{position: text.Position{LineNumber: -1, ColumnNumber: 1, Offset: -1}, LinesPos: -1}

	b, err := io.ReadAll(r)
	b, err := ioutil.ReadAll(r)
	if err != nil {
		return ectx
		return errCtx
	}

	pos := &errCtx.position
	lepos := le.Position()

	lines := strings.Split(string(b), "\n")

	if lepos.ColumnNumber >= 0 {
		pos.ColumnNumber = lepos.ColumnNumber
	}

	lineNo := 0
	posBytes := 0
|
|||
Offset: posBytes,
|
||||
Line: line,
|
||||
}
|
||||
v := matches(m)
|
||||
if ectx.LinesPos == -1 && v != -1 {
|
||||
ectx.Position.LineNumber = lineNo
|
||||
ectx.Position.ColumnNumber = v
|
||||
if errCtx.LinesPos == -1 && matches(m) {
|
||||
pos.LineNumber = lineNo
|
||||
break
|
||||
}
|
||||
|
||||
posBytes += len(line)
|
||||
}
|
||||
|
||||
if ectx.Position.LineNumber > 0 {
|
||||
low := max(ectx.Position.LineNumber-3, 0)
|
||||
if pos.LineNumber != -1 {
|
||||
low := pos.LineNumber - 3
|
||||
if low < 0 {
|
||||
low = 0
|
||||
}
|
||||
|
||||
if ectx.Position.LineNumber > 2 {
|
||||
ectx.LinesPos = 2
|
||||
if pos.LineNumber > 2 {
|
||||
errCtx.LinesPos = 2
|
||||
} else {
|
||||
ectx.LinesPos = ectx.Position.LineNumber - 1
|
||||
errCtx.LinesPos = pos.LineNumber - 1
|
||||
}
|
||||
|
||||
high := min(ectx.Position.LineNumber+2, len(lines))
|
||||
high := pos.LineNumber + 2
|
||||
if high > len(lines) {
|
||||
high = len(lines)
|
||||
}
|
||||
|
||||
ectx.Lines = lines[low:high]
|
||||
errCtx.Lines = lines[low:high]
|
||||
|
||||
}
|
||||
|
||||
return ectx
|
||||
return errCtx
|
||||
}
|
||||
|
|
|
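To illustrate the newer LineMatcherFn contract shown above (int instead of bool), here is a small, hedged example written for this note rather than taken from the diff: a matcher returns the 1-based column on a hit, 0 for a hit with an unknown column, and -1 for no match, which is exactly what ContainsMatcher does.

matcher := ContainsMatcher("THEONE") // returns idx+1 on the matching line, -1 otherwise
errCtx := locateErrorInString("L1\nThis is THEONE\nL2\n", matcher)
// errCtx.Position.LineNumber == 2, errCtx.Position.ColumnNumber == 9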
@@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.

@@ -11,7 +11,7 @@
// See the License for the specific language governing permissions and
// limitations under the License.

// Package herrors contains common Hugo errors and error related utilities.
// Package errors contains common Hugo errors and error related utilities.
package herrors

import (

@@ -24,11 +24,8 @@ import (
func TestErrorLocator(t *testing.T) {
	c := qt.New(t)

	lineMatcher := func(m LineMatcher) int {
		if strings.Contains(m.Line, "THEONE") {
			return 1
		}
		return -1
	lineMatcher := func(m LineMatcher) bool {
		return strings.Contains(m.Line, "THEONE")
	}

	lines := `LINE 1
@@ -42,41 +39,35 @@ LINE 8
`

	location := locateErrorInString(lines, lineMatcher)
	pos := location.Position
	c.Assert(location.Lines, qt.DeepEquals, []string{"LINE 3", "LINE 4", "This is THEONE", "LINE 6", "LINE 7"})

	pos := location.Position()
	c.Assert(pos.LineNumber, qt.Equals, 5)
	c.Assert(location.LinesPos, qt.Equals, 2)

	locate := func(s string, m LineMatcherFn) *ErrorContext {
		ctx := locateErrorInString(s, m)
		return ctx
	}

	c.Assert(locate(`This is THEONE`, lineMatcher).Lines, qt.DeepEquals, []string{"This is THEONE"})
	c.Assert(locateErrorInString(`This is THEONE`, lineMatcher).Lines, qt.DeepEquals, []string{"This is THEONE"})

	location = locateErrorInString(`L1
This is THEONE
L2
`, lineMatcher)
	pos = location.Position
	c.Assert(pos.LineNumber, qt.Equals, 2)
	c.Assert(location.Position().LineNumber, qt.Equals, 2)
	c.Assert(location.LinesPos, qt.Equals, 1)
	c.Assert(location.Lines, qt.DeepEquals, []string{"L1", "This is THEONE", "L2", ""})

	location = locate(`This is THEONE
	location = locateErrorInString(`This is THEONE
L2
`, lineMatcher)
	c.Assert(location.LinesPos, qt.Equals, 0)
	c.Assert(location.Lines, qt.DeepEquals, []string{"This is THEONE", "L2", ""})

	location = locate(`L1
	location = locateErrorInString(`L1
This THEONE
`, lineMatcher)
	c.Assert(location.Lines, qt.DeepEquals, []string{"L1", "This THEONE", ""})
	c.Assert(location.LinesPos, qt.Equals, 1)

	location = locate(`L1
	location = locateErrorInString(`L1
L2
This THEONE
`, lineMatcher)
@@ -84,16 +75,12 @@ This THEONE
	c.Assert(location.LinesPos, qt.Equals, 2)

	location = locateErrorInString("NO MATCH", lineMatcher)
	pos = location.Position
	c.Assert(pos.LineNumber, qt.Equals, 0)
	c.Assert(location.Position().LineNumber, qt.Equals, -1)
	c.Assert(location.LinesPos, qt.Equals, -1)
	c.Assert(len(location.Lines), qt.Equals, 0)

	lineMatcher = func(m LineMatcher) int {
		if m.LineNumber == 6 {
			return 1
		}
		return -1
	lineMatcher = func(m LineMatcher) bool {
		return m.LineNumber == 6
	}

	location = locateErrorInString(`A

@@ -106,18 +93,14 @@ G
H
I
J`, lineMatcher)
	pos = location.Position

	c.Assert(location.Lines, qt.DeepEquals, []string{"D", "E", "F", "G", "H"})
	c.Assert(pos.LineNumber, qt.Equals, 6)
	c.Assert(location.Position().LineNumber, qt.Equals, 6)
	c.Assert(location.LinesPos, qt.Equals, 2)

	// Test match EOF
	lineMatcher = func(m LineMatcher) int {
		if m.LineNumber == 4 {
			return 1
		}
		return -1
	lineMatcher = func(m LineMatcher) bool {
		return m.LineNumber == 4
	}

	location = locateErrorInString(`A

@@ -125,17 +108,12 @@ B
C
`, lineMatcher)

	pos = location.Position

	c.Assert(location.Lines, qt.DeepEquals, []string{"B", "C", ""})
	c.Assert(pos.LineNumber, qt.Equals, 4)
	c.Assert(location.Position().LineNumber, qt.Equals, 4)
	c.Assert(location.LinesPos, qt.Equals, 2)

	offsetMatcher := func(m LineMatcher) int {
		if m.Offset == 1 {
			return 1
		}
		return -1
	offsetMatcher := func(m LineMatcher) bool {
		return m.Offset == 1
	}

	location = locateErrorInString(`A

@@ -144,9 +122,8 @@ C
D
E`, offsetMatcher)

	pos = location.Position

	c.Assert(location.Lines, qt.DeepEquals, []string{"A", "B", "C", "D"})
	c.Assert(pos.LineNumber, qt.Equals, 2)
	c.Assert(location.Position().LineNumber, qt.Equals, 2)
	c.Assert(location.LinesPos, qt.Equals, 1)

}
@@ -1,4 +1,4 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
// Copyright 2018 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
@@ -15,17 +15,41 @@
package herrors

import (
	"bytes"
	"errors"
	"fmt"
	"io"
	"os"
	"regexp"
	"runtime"
	"runtime/debug"
	"strings"
	"time"
	"strconv"

	_errors "github.com/pkg/errors"
)

// As defined in https://godoc.org/github.com/pkg/errors
type causer interface {
	Cause() error
}

type stackTracer interface {
	StackTrace() _errors.StackTrace
}

// PrintStackTraceFromErr prints the error's stack trace to stdout.
func PrintStackTraceFromErr(err error) {
	FprintStackTraceFromErr(os.Stdout, err)
}

// FprintStackTraceFromErr prints the error's stack trace to w.
func FprintStackTraceFromErr(w io.Writer, err error) {
	if err, ok := err.(stackTracer); ok {
		for _, f := range err.StackTrace() {
			fmt.Fprintf(w, "%+s:%d\n", f, f)
		}
	}
}

// PrintStackTrace prints the current stacktrace to w.
func PrintStackTrace(w io.Writer) {
	buf := make([]byte, 1<<16)
@@ -33,16 +57,10 @@ func PrintStackTrace(w io.Writer) {
	fmt.Fprintf(w, "%s", buf)
}

// ErrorSender is a, typically, non-blocking error handler.
type ErrorSender interface {
	SendError(err error)
}

// Recover is a helper function that can be used to capture panics.
// Put this at the top of a method/function that crashes in a template:
//
//	defer herrors.Recover()
func Recover(args ...any) {
func Recover(args ...interface{}) {
	if r := recover(); r != nil {
		fmt.Println("ERR:", r)
		args = append(args, "stacktrace from panic: \n"+string(debug.Stack()), "\n")
@@ -50,138 +68,18 @@ func Recover(args ...any) {
	}
}

// IsTimeoutError returns true if the given error is or contains a TimeoutError.
func IsTimeoutError(err error) bool {
	return errors.Is(err, &TimeoutError{})
}

type TimeoutError struct {
	Duration time.Duration
}

func (e *TimeoutError) Error() string {
	return fmt.Sprintf("timeout after %s", e.Duration)
}

func (e *TimeoutError) Is(target error) bool {
	_, ok := target.(*TimeoutError)
	return ok
}

// errMessage wraps an error with a message.
type errMessage struct {
	msg string
	err error
}

func (e *errMessage) Error() string {
	return e.msg
}

func (e *errMessage) Unwrap() error {
	return e.err
}

// IsFeatureNotAvailableError returns true if the given error is or contains a FeatureNotAvailableError.
func IsFeatureNotAvailableError(err error) bool {
	return errors.Is(err, &FeatureNotAvailableError{})
// Get the current goroutine id. Used only for debugging.
func GetGID() uint64 {
	b := make([]byte, 64)
	b = b[:runtime.Stack(b, false)]
	b = bytes.TrimPrefix(b, []byte("goroutine "))
	b = b[:bytes.IndexByte(b, ' ')]
	n, _ := strconv.ParseUint(string(b), 10, 64)
	return n
}

// ErrFeatureNotAvailable denotes that a feature is unavailable.
//
// We will, at least to begin with, make some Hugo features (SCSS with libsass) optional,
// and this error is used to signal those situations.
var ErrFeatureNotAvailable = &FeatureNotAvailableError{Cause: errors.New("this feature is not available in your current Hugo version, see https://goo.gl/YMrWcn for more information")}

// FeatureNotAvailableError is an error type used to signal that a feature is not available.
type FeatureNotAvailableError struct {
	Cause error
}

func (e *FeatureNotAvailableError) Unwrap() error {
	return e.Cause
}

func (e *FeatureNotAvailableError) Error() string {
	return e.Cause.Error()
}

func (e *FeatureNotAvailableError) Is(target error) bool {
	_, ok := target.(*FeatureNotAvailableError)
	return ok
}

// Must panics if err != nil.
func Must(err error) {
	if err != nil {
		panic(err)
	}
}

// IsNotExist returns true if the error is a file not found error.
// Unlike os.IsNotExist, this also considers wrapped errors.
func IsNotExist(err error) bool {
	if os.IsNotExist(err) {
		return true
	}

	// os.IsNotExist does not consider wrapped errors.
	if os.IsNotExist(errors.Unwrap(err)) {
		return true
	}

	return false
}

// IsExist returns true if the error is a file exists error.
// Unlike os.IsExist, this also considers wrapped errors.
func IsExist(err error) bool {
	if os.IsExist(err) {
		return true
	}

	// os.IsExist does not consider wrapped errors.
	if os.IsExist(errors.Unwrap(err)) {
		return true
	}

	return false
}

var nilPointerErrRe = regexp.MustCompile(`at <(.*)>: error calling (.*?): runtime error: invalid memory address or nil pointer dereference`)

const deferredPrefix = "__hdeferred/"

var deferredStringToRemove = regexp.MustCompile(`executing "__hdeferred/.*?" `)

// ImproveRenderErr improves the error message for rendering errors.
func ImproveRenderErr(inErr error) (outErr error) {
	outErr = inErr
	msg := improveIfNilPointerMsg(inErr)
	if msg != "" {
		outErr = &errMessage{msg: msg, err: outErr}
	}

	if strings.Contains(inErr.Error(), deferredPrefix) {
		msg := deferredStringToRemove.ReplaceAllString(inErr.Error(), "executing ")
		outErr = &errMessage{msg: msg, err: outErr}
	}
	return
}

func improveIfNilPointerMsg(inErr error) string {
	m := nilPointerErrRe.FindStringSubmatch(inErr.Error())
	if len(m) == 0 {
		return ""
	}
	call := m[1]
	field := m[2]
	parts := strings.Split(call, ".")
	if len(parts) < 2 {
		return ""
	}
	receiverName := parts[len(parts)-2]
	receiver := strings.Join(parts[:len(parts)-1], ".")
	s := fmt.Sprintf("– %s is nil; wrap it in if or with: {{ with %s }}{{ .%s }}{{ end }}", receiverName, receiver, field)
	return nilPointerErrRe.ReplaceAllString(inErr.Error(), s)
}
var ErrFeatureNotAvailable = errors.New("this feature is not available in your current Hugo version, see https://goo.gl/YMrWcn for more information")
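The Is methods above let callers detect these error kinds through arbitrary wrapping, because errors.Is walks the wrap chain and calls Is on each link. A small illustration (hypothetical caller code, not from the diff):

err := fmt.Errorf("render: %w", &TimeoutError{Duration: 30 * time.Second})
IsTimeoutError(err)             // true: errors.Is unwraps and hits TimeoutError.Is
IsFeatureNotAvailableError(err) // false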
@@ -1,45 +0,0 @@
// Copyright 2024 The Hugo Authors. All rights reserved.
//
// Licensed under the Apache License, Version 2.0 (the "License");
// you may not use this file except in compliance with the License.
// You may obtain a copy of the License at
// http://www.apache.org/licenses/LICENSE-2.0
//
// Unless required by applicable law or agreed to in writing, software
// distributed under the License is distributed on an "AS IS" BASIS,
// WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
// See the License for the specific language governing permissions and
// limitations under the License.

package herrors

import (
	"errors"
	"fmt"
	"testing"

	qt "github.com/frankban/quicktest"
	"github.com/spf13/afero"
)

func TestIsNotExist(t *testing.T) {
	c := qt.New(t)

	c.Assert(IsNotExist(afero.ErrFileNotFound), qt.Equals, true)
	c.Assert(IsNotExist(afero.ErrFileExists), qt.Equals, false)
	c.Assert(IsNotExist(afero.ErrDestinationExists), qt.Equals, false)
	c.Assert(IsNotExist(nil), qt.Equals, false)

	c.Assert(IsNotExist(fmt.Errorf("foo")), qt.Equals, false)

	// os.IsNotExist returns false for wrapped errors.
	c.Assert(IsNotExist(fmt.Errorf("foo: %w", afero.ErrFileNotFound)), qt.Equals, true)
}

func TestIsFeatureNotAvailableError(t *testing.T) {
	c := qt.New(t)

	c.Assert(IsFeatureNotAvailableError(ErrFeatureNotAvailable), qt.Equals, true)
	c.Assert(IsFeatureNotAvailableError(&FeatureNotAvailableError{}), qt.Equals, true)
	c.Assert(IsFeatureNotAvailableError(errors.New("asdf")), qt.Equals, false)
}
Some files were not shown because too many files have changed in this diff.