Mirror of https://codeberg.org/forgejo/forgejo.git (synced 2025-05-31 11:52:10 +00:00)

Commit 094c84ed6d: Merge branch 'rebase-forgejo-dependency' into wip-forgejo

292 changed files with 8842 additions and 1269 deletions

@@ -35,6 +35,9 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
case GithubEventGollum:
return triggedEvent == webhook_module.HookEventWiki

case GithubEventSchedule:
return triggedEvent == webhook_module.HookEventSchedule

case GithubEventIssues:
switch triggedEvent {
case webhook_module.HookEventIssues,

@@ -70,9 +73,6 @@ func canGithubEventMatch(eventName string, triggedEvent webhook_module.HookEvent
return false
}

case GithubEventSchedule:
return triggedEvent == webhook_module.HookEventSchedule

default:
return eventName == string(triggedEvent)
}

@@ -22,7 +22,7 @@ import (

type DetectedWorkflow struct {
EntryName string
TriggerEvent *jobparser.Event
TriggerEvent string
Content []byte
}

@@ -103,7 +103,6 @@ func DetectWorkflows(
commit *git.Commit,
triggedEvent webhook_module.HookEventType,
payload api.Payloader,
detectSchedule bool,
) ([]*DetectedWorkflow, []*DetectedWorkflow, error) {
entries, err := ListWorkflows(commit)
if err != nil {

@@ -118,7 +117,6 @@ func DetectWorkflows(
return nil, nil, err
}

// one workflow may have multiple events
events, err := GetEventsFromContent(content)
if err != nil {
log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)

@@ -127,18 +125,17 @@ func DetectWorkflows(
for _, evt := range events {
log.Trace("detect workflow %q for event %#v matching %q", entry.Name(), evt, triggedEvent)
if evt.IsSchedule() {
if detectSchedule {
dwf := &DetectedWorkflow{
EntryName: entry.Name(),
TriggerEvent: evt,
Content: content,
}
schedules = append(schedules, dwf)
}
} else if detectMatched(gitRepo, commit, triggedEvent, payload, evt) {
dwf := &DetectedWorkflow{
EntryName: entry.Name(),
TriggerEvent: evt,
TriggerEvent: evt.Name,
Content: content,
}
schedules = append(schedules, dwf)
}
if detectMatched(gitRepo, commit, triggedEvent, payload, evt) {
dwf := &DetectedWorkflow{
EntryName: entry.Name(),
TriggerEvent: evt.Name,
Content: content,
}
workflows = append(workflows, dwf)

@@ -149,41 +146,6 @@ func DetectWorkflows(
return workflows, schedules, nil
}

func DetectScheduledWorkflows(gitRepo *git.Repository, commit *git.Commit) ([]*DetectedWorkflow, error) {
entries, err := ListWorkflows(commit)
if err != nil {
return nil, err
}

wfs := make([]*DetectedWorkflow, 0, len(entries))
for _, entry := range entries {
content, err := GetContentFromEntry(entry)
if err != nil {
return nil, err
}

// one workflow may have multiple events
events, err := GetEventsFromContent(content)
if err != nil {
log.Warn("ignore invalid workflow %q: %v", entry.Name(), err)
continue
}
for _, evt := range events {
if evt.IsSchedule() {
log.Trace("detect scheduled workflow: %q", entry.Name())
dwf := &DetectedWorkflow{
EntryName: entry.Name(),
TriggerEvent: evt,
Content: content,
}
wfs = append(wfs, dwf)
}
}
}

return wfs, nil
}

func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent webhook_module.HookEventType, payload api.Payloader, evt *jobparser.Event) bool {
if !canGithubEventMatch(evt.Name, triggedEvent) {
return false

@@ -191,11 +153,11 @@ func detectMatched(gitRepo *git.Repository, commit *git.Commit, triggedEvent web

switch triggedEvent {
case // events with no activity types
webhook_module.HookEventSchedule,
webhook_module.HookEventCreate,
webhook_module.HookEventDelete,
webhook_module.HookEventFork,
webhook_module.HookEventWiki,
webhook_module.HookEventSchedule:
webhook_module.HookEventWiki:
if len(evt.Acts()) != 0 {
log.Warn("Ignore unsupported %s event arguments %v", triggedEvent, evt.Acts())
}

@@ -4,12 +4,12 @@
package hash

import (
"crypto/sha256"
"encoding/hex"
"strings"

"code.gitea.io/gitea/modules/log"

"github.com/minio/sha256-simd"
"golang.org/x/crypto/pbkdf2"
)

@@ -4,10 +4,9 @@
package avatar

import (
"crypto/sha256"
"encoding/hex"
"strconv"

"github.com/minio/sha256-simd"
)

// HashAvatar will generate a unique string, which ensures that when there's a

@@ -7,11 +7,10 @@
package identicon

import (
"crypto/sha256"
"fmt"
"image"
"image/color"

"github.com/minio/sha256-simd"
)

const minImageSize = 16

@@ -5,6 +5,7 @@ package base

import (
"crypto/sha1"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"

@@ -22,7 +23,6 @@ import (
"code.gitea.io/gitea/modules/setting"

"github.com/dustin/go-humanize"
"github.com/minio/sha256-simd"
)

// EncodeSha1 string to sha1 hex value.

@@ -11,6 +11,7 @@ import (
"net/url"
"strings"

issues_model "code.gitea.io/gitea/models/issues"
"code.gitea.io/gitea/models/unit"
user_model "code.gitea.io/gitea/models/user"
mc "code.gitea.io/gitea/modules/cache"

@@ -38,6 +39,7 @@ type APIContext struct {
ContextUser *user_model.User // the user which is being visited, in most cases it differs from Doer

Repo *Repository
Comment *issues_model.Comment
Org *APIOrganization
Package *Package
}

@@ -400,6 +400,7 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
ctx.Data["PushMirrors"] = pushMirrors
ctx.Data["RepoName"] = ctx.Repo.Repository.Name
ctx.Data["IsEmptyRepo"] = ctx.Repo.Repository.IsEmpty
ctx.Data["DefaultWikiBranchName"] = setting.Repository.DefaultBranch
}

// RepoIDAssignment returns a handler which assigns the repo to the context.

@@ -38,22 +38,14 @@ func NewInternalToken() (string, error) {
return internalToken, nil
}

// NewJwtSecret generates a new value intended to be used for JWT secrets.
func NewJwtSecret() ([]byte, error) {
// NewJwtSecret generates a new base64 encoded value intended to be used for JWT secrets.
func NewJwtSecret() ([]byte, string, error) {
bytes := make([]byte, 32)
_, err := io.ReadFull(rand.Reader, bytes)
if err != nil {
return nil, err
}
return bytes, nil
}

// NewJwtSecretBase64 generates a new base64 encoded value intended to be used for JWT secrets.
func NewJwtSecretBase64() ([]byte, string, error) {
bytes, err := NewJwtSecret()
_, err := rand.Read(bytes)
if err != nil {
return nil, "", err
}

return bytes, base64.RawURLEncoding.EncodeToString(bytes), nil
}

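A minimal caller sketch for the new NewJwtSecret signature (illustrative only; variable names are assumptions, not part of this commit):

	jwtBytes, jwtBase64, err := generate.NewJwtSecret()
	if err != nil {
		log.Fatal("error generating JWT secret: %v", err)
	}
	// jwtBytes is the raw 32-byte secret; jwtBase64 is its base64.RawURLEncoding form.
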
@@ -515,6 +515,62 @@ func GetCommitFileStatus(ctx context.Context, repoPath, commitID string) (*Commi
return fileStatus, nil
}

func parseCommitRenames(renames *[][2]string, stdout io.Reader) {
rd := bufio.NewReader(stdout)
for {
// Skip (R || three digits || NULL byte)
_, err := rd.Discard(5)
if err != nil {
if err != io.EOF {
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
}
return
}
oldFileName, err := rd.ReadString('\x00')
if err != nil {
if err != io.EOF {
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
}
return
}
newFileName, err := rd.ReadString('\x00')
if err != nil {
if err != io.EOF {
log.Error("Unexpected error whilst reading from git log --name-status. Error: %v", err)
}
return
}
oldFileName = strings.TrimSuffix(oldFileName, "\x00")
newFileName = strings.TrimSuffix(newFileName, "\x00")
*renames = append(*renames, [2]string{oldFileName, newFileName})
}
}

// GetCommitFileRenames returns the renames that the commit contains.
func GetCommitFileRenames(ctx context.Context, repoPath, commitID string) ([][2]string, error) {
renames := [][2]string{}
stdout, w := io.Pipe()
done := make(chan struct{})
go func() {
parseCommitRenames(&renames, stdout)
close(done)
}()

stderr := new(bytes.Buffer)
err := NewCommand(ctx, "show", "--name-status", "--pretty=format:", "-z", "--diff-filter=R").AddDynamicArguments(commitID).Run(&RunOpts{
Dir: repoPath,
Stdout: w,
Stderr: stderr,
})
w.Close() // Close writer to exit parsing goroutine
if err != nil {
return nil, ConcatenateError(err, stderr.String())
}

<-done
return renames, nil
}

// GetFullCommitID returns full length (40) of commit ID by given short SHA in a repository.
func GetFullCommitID(ctx context.Context, repoPath, shortID string) (string, error) {
commitID, _, err := NewCommand(ctx, "rev-parse").AddDynamicArguments(shortID).RunStdString(&RunOpts{Dir: repoPath})

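A minimal usage sketch for the new GetCommitFileRenames helper (repository path and commit ID below are placeholders):

	renames, err := git.GetCommitFileRenames(ctx, "/path/to/repo.git", commitID)
	if err != nil {
		return err
	}
	for _, rename := range renames {
		// rename[0] is the old path, rename[1] is the new path
		log.Trace("renamed %s to %s", rename[0], rename[1])
	}
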
@@ -278,3 +278,30 @@ func TestGetCommitFileStatusMerges(t *testing.T) {
assert.Equal(t, commitFileStatus.Removed, expected.Removed)
assert.Equal(t, commitFileStatus.Modified, expected.Modified)
}

func TestParseCommitRenames(t *testing.T) {
testcases := []struct {
output string
renames [][2]string
}{
{
output: "R090\x00renamed.txt\x00history.txt\x00",
renames: [][2]string{{"renamed.txt", "history.txt"}},
},
{
output: "R090\x00renamed.txt\x00history.txt\x00R000\x00corruptedstdouthere",
renames: [][2]string{{"renamed.txt", "history.txt"}},
},
{
output: "R100\x00renamed.txt\x00history.txt\x00R001\x00readme.md\x00README.md\x00",
renames: [][2]string{{"renamed.txt", "history.txt"}, {"readme.md", "README.md"}},
},
}

for _, testcase := range testcases {
renames := [][2]string{}
parseCommitRenames(&renames, strings.NewReader(testcase.output))

assert.Equal(t, testcase.renames, renames)
}
}

@@ -4,12 +4,11 @@
package git

import (
"crypto/sha256"
"fmt"

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"

"github.com/minio/sha256-simd"
)

// Cache represents a caching interface

@@ -291,7 +291,7 @@ func (repo *Repository) CheckAttributeReader(commitID string) (*CheckAttributeRe
}

checker := &CheckAttributeReader{
Attributes: []string{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language"},
Attributes: []string{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language", "linguist-documentation", "linguist-detectable"},
Repo: repo,
IndexFile: indexFilename,
WorkTree: worktree,

@@ -13,6 +13,18 @@ const (
bigFileSize int64 = 1024 * 1024 // 1 MiB
)

type LinguistBoolAttrib struct {
Value string
}

func (attrib *LinguistBoolAttrib) IsTrue() bool {
return attrib.Value == "set" || attrib.Value == "true"
}

func (attrib *LinguistBoolAttrib) IsFalse() bool {
return attrib.Value == "unset" || attrib.Value == "false"
}

// mergeLanguageStats mergers language names with different cases. The name with most upper case letters is used.
func mergeLanguageStats(stats map[string]int64) map[string]int64 {
names := map[string]struct {

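A small illustration of the tri-state semantics of LinguistBoolAttrib (assumed usage, not part of the diff):

	vendored := LinguistBoolAttrib{Value: "true"} // e.g. from a linguist-vendored=true attribute
	_ = vendored.IsTrue()  // true
	_ = vendored.IsFalse() // false

	unset := LinguistBoolAttrib{} // attribute not present in .gitattributes
	_ = unset.IsTrue()  // false
	_ = unset.IsFalse() // false: neither forced on nor forced off, so the enry heuristics still apply
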
@ -1,4 +1,5 @@
|
|||
// Copyright 2020 The Gitea Authors. All rights reserved.
|
||||
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
//go:build gogit
|
||||
|
@ -57,23 +58,25 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
return nil
|
||||
}
|
||||
|
||||
notVendored := false
|
||||
notGenerated := false
|
||||
isVendored := LinguistBoolAttrib{}
|
||||
isGenerated := LinguistBoolAttrib{}
|
||||
isDocumentation := LinguistBoolAttrib{}
|
||||
isDetectable := LinguistBoolAttrib{}
|
||||
|
||||
if checker != nil {
|
||||
attrs, err := checker.CheckPath(f.Name)
|
||||
if err == nil {
|
||||
if vendored, has := attrs["linguist-vendored"]; has {
|
||||
if vendored == "set" || vendored == "true" {
|
||||
return nil
|
||||
}
|
||||
notVendored = vendored == "false"
|
||||
isVendored = LinguistBoolAttrib{Value: vendored}
|
||||
}
|
||||
if generated, has := attrs["linguist-generated"]; has {
|
||||
if generated == "set" || generated == "true" {
|
||||
return nil
|
||||
}
|
||||
notGenerated = generated == "false"
|
||||
isGenerated = LinguistBoolAttrib{Value: generated}
|
||||
}
|
||||
if documentation, has := attrs["linguist-documentation"]; has {
|
||||
isDocumentation = LinguistBoolAttrib{Value: documentation}
|
||||
}
|
||||
if detectable, has := attrs["linguist-detectable"]; has {
|
||||
isDetectable = LinguistBoolAttrib{Value: detectable}
|
||||
}
|
||||
if language, has := attrs["linguist-language"]; has && language != "unspecified" && language != "" {
|
||||
// group languages, such as Pug -> HTML; SCSS -> CSS
|
||||
|
@ -105,8 +108,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
}
|
||||
}
|
||||
|
||||
if (!notVendored && analyze.IsVendor(f.Name)) || enry.IsDotFile(f.Name) ||
|
||||
enry.IsDocumentation(f.Name) || enry.IsConfiguration(f.Name) {
|
||||
if isDetectable.IsFalse() || isVendored.IsTrue() || isDocumentation.IsTrue() ||
|
||||
(!isVendored.IsFalse() && analyze.IsVendor(f.Name)) ||
|
||||
enry.IsDotFile(f.Name) ||
|
||||
enry.IsConfiguration(f.Name) ||
|
||||
(!isDocumentation.IsFalse() && enry.IsDocumentation(f.Name)) {
|
||||
return nil
|
||||
}
|
||||
|
||||
|
@ -115,12 +121,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
if f.Size <= bigFileSize {
|
||||
content, _ = readFile(f, fileSizeLimit)
|
||||
}
|
||||
if !notGenerated && enry.IsGenerated(f.Name, content) {
|
||||
if !isGenerated.IsTrue() && enry.IsGenerated(f.Name, content) {
|
||||
return nil
|
||||
}
|
||||
|
||||
// TODO: Use .gitattributes file for linguist overrides
|
||||
|
||||
language := analyze.GetCodeLanguage(f.Name, content)
|
||||
if language == enry.OtherLanguage || language == "" {
|
||||
return nil
|
||||
|
@ -136,6 +141,13 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
if !checked {
|
||||
langtype := enry.GetLanguageType(language)
|
||||
included = langtype == enry.Programming || langtype == enry.Markup
|
||||
if !included {
|
||||
if isDetectable.IsTrue() {
|
||||
included = true
|
||||
} else {
|
||||
return nil
|
||||
}
|
||||
}
|
||||
includedLanguage[language] = included
|
||||
}
|
||||
if included {
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
// Copyright 2020 The Gitea Authors. All rights reserved.
|
||||
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
//go:build !gogit
|
||||
|
@ -90,23 +91,25 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
continue
|
||||
}
|
||||
|
||||
notVendored := false
|
||||
notGenerated := false
|
||||
isVendored := LinguistBoolAttrib{}
|
||||
isGenerated := LinguistBoolAttrib{}
|
||||
isDocumentation := LinguistBoolAttrib{}
|
||||
isDetectable := LinguistBoolAttrib{}
|
||||
|
||||
if checker != nil {
|
||||
attrs, err := checker.CheckPath(f.Name())
|
||||
if err == nil {
|
||||
if vendored, has := attrs["linguist-vendored"]; has {
|
||||
if vendored == "set" || vendored == "true" {
|
||||
continue
|
||||
}
|
||||
notVendored = vendored == "false"
|
||||
isVendored = LinguistBoolAttrib{Value: vendored}
|
||||
}
|
||||
if generated, has := attrs["linguist-generated"]; has {
|
||||
if generated == "set" || generated == "true" {
|
||||
continue
|
||||
}
|
||||
notGenerated = generated == "false"
|
||||
isGenerated = LinguistBoolAttrib{Value: generated}
|
||||
}
|
||||
if documentation, has := attrs["linguist-documentation"]; has {
|
||||
isDocumentation = LinguistBoolAttrib{Value: documentation}
|
||||
}
|
||||
if detectable, has := attrs["linguist-detectable"]; has {
|
||||
isDetectable = LinguistBoolAttrib{Value: detectable}
|
||||
}
|
||||
if language, has := attrs["linguist-language"]; has && language != "unspecified" && language != "" {
|
||||
// group languages, such as Pug -> HTML; SCSS -> CSS
|
||||
|
@ -139,8 +142,11 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
}
|
||||
}
|
||||
|
||||
if (!notVendored && analyze.IsVendor(f.Name())) || enry.IsDotFile(f.Name()) ||
|
||||
enry.IsDocumentation(f.Name()) || enry.IsConfiguration(f.Name()) {
|
||||
if isDetectable.IsFalse() || isVendored.IsTrue() || isDocumentation.IsTrue() ||
|
||||
(!isVendored.IsFalse() && analyze.IsVendor(f.Name())) ||
|
||||
enry.IsDotFile(f.Name()) ||
|
||||
enry.IsConfiguration(f.Name()) ||
|
||||
(!isDocumentation.IsFalse() && enry.IsDocumentation(f.Name())) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@ -173,7 +179,7 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
return nil, err
|
||||
}
|
||||
}
|
||||
if !notGenerated && enry.IsGenerated(f.Name(), content) {
|
||||
if !isGenerated.IsTrue() && enry.IsGenerated(f.Name(), content) {
|
||||
continue
|
||||
}
|
||||
|
||||
|
@ -194,6 +200,13 @@ func (repo *Repository) GetLanguageStats(commitID string) (map[string]int64, err
|
|||
if !checked {
|
||||
langType := enry.GetLanguageType(language)
|
||||
included = langType == enry.Programming || langType == enry.Markup
|
||||
if !included {
|
||||
if isDetectable.IsTrue() {
|
||||
included = true
|
||||
} else {
|
||||
continue
|
||||
}
|
||||
}
|
||||
includedLanguage[language] = included
|
||||
}
|
||||
if included {
|
||||
|
|
|
@@ -1,9 +1,12 @@
// Copyright 2015 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
// SPDX-License-Identifier: MIT

package git

import "strings"

// GetBlobByPath get the blob object according the path
func (t *Tree) GetBlobByPath(relpath string) (*Blob, error) {
entry, err := t.GetTreeEntryByPath(relpath)

@@ -17,3 +20,21 @@ func (t *Tree) GetBlobByPath(relpath string) (*Blob, error) {

return nil, ErrNotExist{"", relpath}
}

// GetBlobByFoldedPath returns the blob object at relpath, regardless of the
// case of relpath. If there are multiple files with the same case-insensitive
// name, the first one found will be returned.
func (t *Tree) GetBlobByFoldedPath(relpath string) (*Blob, error) {
entries, err := t.ListEntries()
if err != nil {
return nil, err
}

for _, entry := range entries {
if strings.EqualFold(entry.Name(), relpath) {
return t.GetBlobByPath(entry.Name())
}
}

return nil, ErrNotExist{"", relpath}
}

@@ -4,6 +4,7 @@
package lfs

import (
"crypto/sha256"
"encoding/hex"
"errors"
"hash"

@@ -12,8 +13,6 @@ import (

"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/storage"

"github.com/minio/sha256-simd"
)

var (

@@ -4,6 +4,7 @@
package lfs

import (
"crypto/sha256"
"encoding/hex"
"errors"
"fmt"

@@ -12,8 +13,6 @@ import (
"regexp"
"strconv"
"strings"

"github.com/minio/sha256-simd"
)

const (

@@ -29,12 +29,17 @@ func CleanValue(value []byte) []byte {
value = bytes.TrimSpace(value)
rs := bytes.Runes(value)
result := make([]rune, 0, len(rs))
needsDash := false
for _, r := range rs {
if unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_' || r == '-' {
switch {
case unicode.IsLetter(r) || unicode.IsNumber(r) || r == '_':
if needsDash && len(result) > 0 {
result = append(result, '-')
}
needsDash = false
result = append(result, unicode.ToLower(r))
}
if unicode.IsSpace(r) {
result = append(result, '-')
default:
needsDash = true
}
}
return []byte(string(result))

@ -1,4 +1,5 @@
|
|||
// Copyright 2023 The Gitea Authors. All rights reserved.
|
||||
// Copyright 2023 The Forgejo Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
package common
|
||||
|
||||
|
@ -15,44 +16,45 @@ func TestCleanValue(t *testing.T) {
|
|||
}{
|
||||
// Github behavior test cases
|
||||
{"", ""},
|
||||
{"test(0)", "test0"},
|
||||
{"test!1", "test1"},
|
||||
{"test:2", "test2"},
|
||||
{"test*3", "test3"},
|
||||
{"test!4", "test4"},
|
||||
{"test:5", "test5"},
|
||||
{"test*6", "test6"},
|
||||
{"test:6 a", "test6-a"},
|
||||
{"test:6 !b", "test6-b"},
|
||||
{"test:ad # df", "testad--df"},
|
||||
{"test:ad #23 df 2*/*", "testad-23-df-2"},
|
||||
{"test:ad 23 df 2*/*", "testad-23-df-2"},
|
||||
{"test:ad # 23 df 2*/*", "testad--23-df-2"},
|
||||
{"test.0.1", "test-0-1"},
|
||||
{"test(0)", "test-0"},
|
||||
{"test!1", "test-1"},
|
||||
{"test:2", "test-2"},
|
||||
{"test*3", "test-3"},
|
||||
{"test!4", "test-4"},
|
||||
{"test:5", "test-5"},
|
||||
{"test*6", "test-6"},
|
||||
{"test:6 a", "test-6-a"},
|
||||
{"test:6 !b", "test-6-b"},
|
||||
{"test:ad # df", "test-ad-df"},
|
||||
{"test:ad #23 df 2*/*", "test-ad-23-df-2"},
|
||||
{"test:ad 23 df 2*/*", "test-ad-23-df-2"},
|
||||
{"test:ad # 23 df 2*/*", "test-ad-23-df-2"},
|
||||
{"Anchors in Markdown", "anchors-in-markdown"},
|
||||
{"a_b_c", "a_b_c"},
|
||||
{"a-b-c", "a-b-c"},
|
||||
{"a-b-c----", "a-b-c----"},
|
||||
{"test:6a", "test6a"},
|
||||
{"test:a6", "testa6"},
|
||||
{"tes a a a a", "tes-a-a---a--a"},
|
||||
{" tes a a a a ", "tes-a-a---a--a"},
|
||||
{"a-b-c----", "a-b-c"},
|
||||
{"test:6a", "test-6a"},
|
||||
{"test:a6", "test-a6"},
|
||||
{"tes a a a a", "tes-a-a-a-a"},
|
||||
{" tes a a a a ", "tes-a-a-a-a"},
|
||||
{"Header with \"double quotes\"", "header-with-double-quotes"},
|
||||
{"Placeholder to force scrolling on link's click", "placeholder-to-force-scrolling-on-links-click"},
|
||||
{"Placeholder to force scrolling on link's click", "placeholder-to-force-scrolling-on-link-s-click"},
|
||||
{"tes()", "tes"},
|
||||
{"tes(0)", "tes0"},
|
||||
{"tes{0}", "tes0"},
|
||||
{"tes[0]", "tes0"},
|
||||
{"test【0】", "test0"},
|
||||
{"tes…@a", "tesa"},
|
||||
{"tes(0)", "tes-0"},
|
||||
{"tes{0}", "tes-0"},
|
||||
{"tes[0]", "tes-0"},
|
||||
{"test【0】", "test-0"},
|
||||
{"tes…@a", "tes-a"},
|
||||
{"tes¥& a", "tes-a"},
|
||||
{"tes= a", "tes-a"},
|
||||
{"tes|a", "tesa"},
|
||||
{"tes\\a", "tesa"},
|
||||
{"tes/a", "tesa"},
|
||||
{"tes|a", "tes-a"},
|
||||
{"tes\\a", "tes-a"},
|
||||
{"tes/a", "tes-a"},
|
||||
{"a啊啊b", "a啊啊b"},
|
||||
{"c🤔️🤔️d", "cd"},
|
||||
{"a⚡a", "aa"},
|
||||
{"e.~f", "ef"},
|
||||
{"c🤔️🤔️d", "c-d"},
|
||||
{"a⚡a", "a-a"},
|
||||
{"e.~f", "e-f"},
|
||||
}
|
||||
for _, test := range tests {
|
||||
assert.Equal(t, []byte(test.expect), CleanValue([]byte(test.param)), test.param)
|
||||
|
|
|
@ -524,6 +524,18 @@ func TestMathBlock(t *testing.T) {
|
|||
"$$a$$",
|
||||
`<pre class="code-block is-loading"><code class="chroma language-math display">a</code></pre>` + nl,
|
||||
},
|
||||
{
|
||||
`\[a b\]`,
|
||||
`<pre class="code-block is-loading"><code class="chroma language-math display">a b</code></pre>` + nl,
|
||||
},
|
||||
{
|
||||
`\[a b]`,
|
||||
`<p>[a b]</p>` + nl,
|
||||
},
|
||||
{
|
||||
`$$a`,
|
||||
`<p>$$a</p>` + nl,
|
||||
},
|
||||
}
|
||||
|
||||
for _, test := range testcases {
|
||||
|
@ -534,6 +546,204 @@ func TestMathBlock(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestFootnote(t *testing.T) {
|
||||
testcases := []struct {
|
||||
testcase string
|
||||
expected string
|
||||
}{
|
||||
{
|
||||
`Citation needed[^0].
|
||||
[^0]: Source`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup>.</p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]`,
|
||||
`<p>Citation needed[^0]</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^1], Citation needed twice[^3]
|
||||
[^3]: Source`,
|
||||
`<p>Citation needed[^1], Citation needed twice<sup id="fnref:user-content-3"><a href="#fn:user-content-3" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-3">
|
||||
<p>Source <a href="#fnref:user-content-3" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^1]: Source`,
|
||||
`<p>Citation needed[^0]</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0]: Source 1
|
||||
[^0]: Source 2`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<p>Source 1 <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed![^0]
|
||||
[^0]: Source`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Trigger [^`,
|
||||
`<p>Trigger [^</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Trigger 2 [^0`,
|
||||
`<p>Trigger 2 [^0</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0]: Source with citation needed[^1]
|
||||
[^1]: Source`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<p>Source with citation needed<sup id="fnref:user-content-1"><a href="#fn:user-content-1" rel="nofollow">2</a></sup> <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
<li id="fn:user-content-1">
|
||||
<p>Source <a href="#fnref:user-content-1" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^#]
|
||||
[^#]: Source`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-1"><a href="#fn:user-content-1" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-1">
|
||||
<p>Source <a href="#fnref:user-content-1" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0]: Source`,
|
||||
`<p>Citation needed[^0]<br/>
|
||||
[^0]: Source</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`[^0]: Source
|
||||
|
||||
Citation needed[^0].`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup>.</p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<p>Source <a href="#fnref:user-content-0" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^]
|
||||
[^]: Source`,
|
||||
`<p>Citation needed[^]<br/>
|
||||
[^]: Source</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0] Source`,
|
||||
`<p>Citation needed[^0]<br/>
|
||||
[^0] Source</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0 Source`,
|
||||
`<p>Citation needed[^0]<br/>
|
||||
[^0 Source</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0] [^0]: Source`,
|
||||
`<p>Citation needed[^0] [^0]: Source</p>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^Source here 0 # 9-3]
|
||||
[^Source here 0 # 9-3]: Source`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-source-here-0-9-3"><a href="#fn:user-content-source-here-0-9-3" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-source-here-0-9-3">
|
||||
<p>Source <a href="#fnref:user-content-source-here-0-9-3" rel="nofollow">↩︎</a></p>
|
||||
</li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
{
|
||||
`Citation needed[^0]
|
||||
[^0]:`,
|
||||
`<p>Citation needed<sup id="fnref:user-content-0"><a href="#fn:user-content-0" rel="nofollow">1</a></sup></p>
|
||||
<div>
|
||||
<hr/>
|
||||
<ol>
|
||||
<li id="fn:user-content-0">
|
||||
<a href="#fnref:user-content-0" rel="nofollow">↩︎</a></li>
|
||||
</ol>
|
||||
</div>
|
||||
`,
|
||||
},
|
||||
}
|
||||
for _, test := range testcases {
|
||||
res, err := markdown.RenderString(&markup.RenderContext{Ctx: git.DefaultContext}, test.testcase)
|
||||
assert.NoError(t, err, "Unexpected error in testcase: %q", test.testcase)
|
||||
assert.Equal(t, test.expected, res, "Unexpected result in testcase %q", test.testcase)
|
||||
}
|
||||
}
|
||||
|
||||
func TestTaskList(t *testing.T) {
|
||||
testcases := []struct {
|
||||
testcase string
|
||||
|
|
|
@@ -55,10 +55,7 @@ func (b *blockParser) Open(parent ast.Node, reader text.Reader, pc parser.Contex
return node, parser.Close | parser.NoChildren
}

reader.Advance(segment.Len() - 1)
segment.Start += 2
node.Lines().Append(segment)
return node, parser.NoChildren
return nil, parser.NoChildren
}

// Continue parses the current line and returns a result of parsing.

@@ -135,7 +135,12 @@ type Writer struct {

const mailto = "mailto:"

func (r *Writer) resolveLink(l org.RegularLink) string {
func (r *Writer) resolveLink(node org.Node) string {
l, ok := node.(org.RegularLink)
if !ok {
l = org.RegularLink{URL: strings.TrimPrefix(org.String(node), "file:")}
}

link := html.EscapeString(l.URL)
if l.Protocol == "file" {
link = link[len("file:"):]

@@ -162,14 +167,14 @@ func (r *Writer) WriteRegularLink(l org.RegularLink) {
if l.Description == nil {
fmt.Fprintf(r, `<img src="%s" alt="%s" />`, link, link)
} else {
imageSrc := r.resolveLink(l.Description[0].(org.RegularLink))
imageSrc := r.resolveLink(l.Description[0])
fmt.Fprintf(r, `<a href="%s"><img src="%s" alt="%s" /></a>`, link, imageSrc, imageSrc)
}
case "video":
if l.Description == nil {
fmt.Fprintf(r, `<video src="%s">%s</video>`, link, link)
} else {
videoSrc := r.resolveLink(l.Description[0].(org.RegularLink))
videoSrc := r.resolveLink(l.Description[0])
fmt.Fprintf(r, `<a href="%s"><video src="%s">%s</video></a>`, link, videoSrc, videoSrc)
}
default:

@@ -80,6 +80,12 @@ func TestRender_Media(t *testing.T) {
`<p><img src="https://example.com/example.svg" alt="https://example.com/example.svg" /></p>`)
test("[[https://example.com/example.mp4]]",
`<p><video src="https://example.com/example.mp4">https://example.com/example.mp4</video></p>`)

// Text description.
test("[[file:./lem-post.png][file:./lem-post.png]]",
`<p><a href="http://localhost:3000/gogits/gogs/lem-post.png"><img src="http://localhost:3000/gogits/gogs/lem-post.png" alt="http://localhost:3000/gogits/gogs/lem-post.png" /></a></p>`)
test("[[file:./lem-post.mp4][file:./lem-post.mp4]]",
`<p><a href="http://localhost:3000/gogits/gogs/lem-post.mp4"><video src="http://localhost:3000/gogits/gogs/lem-post.mp4">http://localhost:3000/gogits/gogs/lem-post.mp4</video></a></p>`)
}

func TestRender_Source(t *testing.T) {

@@ -1,4 +1,5 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
// SPDX-License-Identifier: MIT

package repository

@@ -99,7 +100,6 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
Mirror: true,
Quiet: true,
Timeout: migrateTimeout,
Branch: "master",
SkipTLSVerify: setting.Migrations.SkipTLSVerify,
}); err != nil {
log.Warn("Clone wiki: %v", err)

@@ -107,6 +107,30 @@ func MigrateRepositoryGitData(ctx context.Context, u *user_model.User,
return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
}
} else {
// Figure out the branch of the wiki we just cloned. We assume
// that the default branch is to be used, and we'll use the same
// name as the source.
gitRepo, err := git.OpenRepository(ctx, wikiPath)
if err != nil {
log.Warn("Failed to open wiki repository during migration: %v", err)
if err := util.RemoveAll(wikiPath); err != nil {
return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
}
return repo, err
}
defer gitRepo.Close()

branch, err := gitRepo.GetDefaultBranch()
if err != nil {
log.Warn("Failed to get the default branch of a migrated wiki repo: %v", err)
if err := util.RemoveAll(wikiPath); err != nil {
return repo, fmt.Errorf("Failed to remove %s: %w", wikiPath, err)
}

return repo, err
}
repo.WikiBranch = branch

if err := git.WriteCommitGraph(ctx, wikiPath); err != nil {
return repo, err
}

@@ -7,13 +7,12 @@ import (
"crypto/aes"
"crypto/cipher"
"crypto/rand"
"crypto/sha256"
"encoding/base64"
"encoding/hex"
"errors"
"fmt"
"io"

"github.com/minio/sha256-simd"
)

// AesEncrypt encrypts text and given key with AES.

@@ -5,8 +5,9 @@ package setting

// Admin settings
var Admin struct {
DisableRegularOrgCreation bool
DefaultEmailNotification  string
DisableRegularOrgCreation      bool
DefaultEmailNotification       string
SendNotificationEmailOnNewUser bool
}

func loadAdminFrom(rootCfg ConfigProvider) {

modules/setting/badges.go (new file, 24 lines)

@@ -0,0 +1,24 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package setting

import (
"text/template"
)

// Badges settings
var Badges = struct {
Enabled bool `ini:"ENABLED"`
GeneratorURLTemplate string `ini:"GENERATOR_URL_TEMPLATE"`
GeneratorURLTemplateTemplate *template.Template `ini:"-"`
}{
Enabled: true,
GeneratorURLTemplate: "https://img.shields.io/badge/{{.label}}-{{.text}}-{{.color}}",
}

func loadBadgesFrom(rootCfg ConfigProvider) {
mustMapSetting(rootCfg, "badges", &Badges)

Badges.GeneratorURLTemplateTemplate = template.Must(template.New("").Parse(Badges.GeneratorURLTemplate))
}

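A sketch of how the default badge URL template expands (illustrative; the map keys match the {{.label}}, {{.text}} and {{.color}} placeholders above):

	var buf bytes.Buffer
	_ = setting.Badges.GeneratorURLTemplateTemplate.Execute(&buf, map[string]string{
		"label": "build",
		"text":  "passing",
		"color": "brightgreen",
	})
	// buf.String() == "https://img.shields.io/badge/build-passing-brightgreen"
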
@ -25,26 +25,27 @@ var (
|
|||
|
||||
// Database holds the database settings
|
||||
Database = struct {
|
||||
Type DatabaseType
|
||||
Host string
|
||||
Name string
|
||||
User string
|
||||
Passwd string
|
||||
Schema string
|
||||
SSLMode string
|
||||
Path string
|
||||
LogSQL bool
|
||||
MysqlCharset string
|
||||
CharsetCollation string
|
||||
Timeout int // seconds
|
||||
SQLiteJournalMode string
|
||||
DBConnectRetries int
|
||||
DBConnectBackoff time.Duration
|
||||
MaxIdleConns int
|
||||
MaxOpenConns int
|
||||
ConnMaxLifetime time.Duration
|
||||
IterateBufferSize int
|
||||
AutoMigration bool
|
||||
Type DatabaseType
|
||||
Host string
|
||||
Name string
|
||||
User string
|
||||
Passwd string
|
||||
Schema string
|
||||
SSLMode string
|
||||
Path string
|
||||
LogSQL bool
|
||||
MysqlCharset string
|
||||
CharsetCollation string
|
||||
Timeout int // seconds
|
||||
SQLiteJournalMode string
|
||||
DBConnectRetries int
|
||||
DBConnectBackoff time.Duration
|
||||
MaxIdleConns int
|
||||
MaxOpenConns int
|
||||
ConnMaxLifetime time.Duration
|
||||
IterateBufferSize int
|
||||
AutoMigration bool
|
||||
SlowQueryThreshold time.Duration
|
||||
}{
|
||||
Timeout: 500,
|
||||
IterateBufferSize: 50,
|
||||
|
@ -87,6 +88,13 @@ func loadDBSetting(rootCfg ConfigProvider) {
|
|||
Database.DBConnectRetries = sec.Key("DB_RETRIES").MustInt(10)
|
||||
Database.DBConnectBackoff = sec.Key("DB_RETRY_BACKOFF").MustDuration(3 * time.Second)
|
||||
Database.AutoMigration = sec.Key("AUTO_MIGRATION").MustBool(true)
|
||||
|
||||
deprecatedSetting(rootCfg, "database", "SLOW_QUERY_TRESHOLD", "database", "SLOW_QUERY_THRESHOLD", "1.23")
|
||||
if sec.HasKey("SLOW_QUERY_TRESHOLD") && !sec.HasKey("SLOW_QUERY_THRESHOLD") {
|
||||
Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_TRESHOLD").MustDuration(5 * time.Second)
|
||||
} else {
|
||||
Database.SlowQueryThreshold = sec.Key("SLOW_QUERY_THRESHOLD").MustDuration(5 * time.Second)
|
||||
}
|
||||
}
|
||||
|
||||
// DBConnStr returns database connection string
|
||||
|
|
|
@@ -64,7 +64,7 @@ func loadLFSFrom(rootCfg ConfigProvider) error {
LFS.JWTSecretBase64 = loadSecret(rootCfg.Section("server"), "LFS_JWT_SECRET_URI", "LFS_JWT_SECRET")
LFS.JWTSecretBytes, err = util.Base64FixedDecode(base64.RawURLEncoding, []byte(LFS.JWTSecretBase64), 32)
if err != nil {
LFS.JWTSecretBytes, LFS.JWTSecretBase64, err = generate.NewJwtSecretBase64()
LFS.JWTSecretBytes, LFS.JWTSecretBase64, err = generate.NewJwtSecret()
if err != nil {
return fmt.Errorf("error generating JWT Secret for custom config: %v", err)
}

@@ -138,12 +138,11 @@ func loadOAuth2From(rootCfg ConfigProvider) {

if InstallLock {
if _, err := util.Base64FixedDecode(base64.RawURLEncoding, []byte(OAuth2.JWTSecretBase64), 32); err != nil {
key, err := generate.NewJwtSecret()
_, OAuth2.JWTSecretBase64, err = generate.NewJwtSecret()
if err != nil {
log.Fatal("error generating JWT secret: %v", err)
}

OAuth2.JWTSecretBase64 = base64.RawURLEncoding.EncodeToString(key)
saveCfg, err := rootCfg.PrepareSaving()
if err != nil {
log.Fatal("save oauth2.JWT_SECRET failed: %v", err)

@ -7,6 +7,7 @@ import (
|
|||
"os/exec"
|
||||
"path"
|
||||
"path/filepath"
|
||||
"slices"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
|
@ -19,6 +20,8 @@ const (
|
|||
RepoCreatingPublic = "public"
|
||||
)
|
||||
|
||||
var RecognisedRepositoryDownloadOrCloneMethods = []string{"download-zip", "download-targz", "download-bundle", "vscode-clone", "vscodium-clone", "cite"}
|
||||
|
||||
// ItemsPerPage maximum items per page in forks, watchers and stars of a repo
|
||||
const ItemsPerPage = 40
|
||||
|
||||
|
@ -43,6 +46,7 @@ var (
|
|||
DisabledRepoUnits []string
|
||||
DefaultRepoUnits []string
|
||||
DefaultForkRepoUnits []string
|
||||
DownloadOrCloneMethods []string
|
||||
PrefixArchiveFiles bool
|
||||
DisableMigrations bool
|
||||
DisableStars bool `ini:"DISABLE_STARS"`
|
||||
|
@ -109,6 +113,9 @@ var (
|
|||
Wiki []string
|
||||
DefaultTrustModel string
|
||||
} `ini:"repository.signing"`
|
||||
|
||||
SettableFlags []string
|
||||
EnableFlags bool
|
||||
}{
|
||||
DetectedCharsetsOrder: []string{
|
||||
"UTF-8",
|
||||
|
@ -151,7 +158,7 @@ var (
|
|||
DefaultPrivate: RepoCreatingLastUserVisibility,
|
||||
DefaultPushCreatePrivate: true,
|
||||
MaxCreationLimit: -1,
|
||||
PreferredLicenses: []string{"Apache License 2.0", "MIT License"},
|
||||
PreferredLicenses: []string{"Apache-2.0", "MIT"},
|
||||
DisableHTTPGit: false,
|
||||
AccessControlAllowOrigin: "",
|
||||
UseCompatSSHURI: false,
|
||||
|
@ -161,6 +168,7 @@ var (
|
|||
DisabledRepoUnits: []string{},
|
||||
DefaultRepoUnits: []string{},
|
||||
DefaultForkRepoUnits: []string{},
|
||||
DownloadOrCloneMethods: []string{"download-zip", "download-targz", "download-bundle", "vscode-clone"},
|
||||
PrefixArchiveFiles: true,
|
||||
DisableMigrations: false,
|
||||
DisableStars: false,
|
||||
|
@ -265,6 +273,8 @@ var (
|
|||
Wiki: []string{"never"},
|
||||
DefaultTrustModel: "collaborator",
|
||||
},
|
||||
|
||||
EnableFlags: false,
|
||||
}
|
||||
RepoRootPath string
|
||||
ScriptType = "bash"
|
||||
|
@ -361,4 +371,12 @@ func loadRepositoryFrom(rootCfg ConfigProvider) {
|
|||
if err := loadRepoArchiveFrom(rootCfg); err != nil {
|
||||
log.Fatal("loadRepoArchiveFrom: %v", err)
|
||||
}
|
||||
|
||||
for _, method := range Repository.DownloadOrCloneMethods {
|
||||
if !slices.Contains(RecognisedRepositoryDownloadOrCloneMethods, method) {
|
||||
log.Error("Unrecognised repository download or clone method: %s", method)
|
||||
}
|
||||
}
|
||||
|
||||
Repository.EnableFlags = sec.Key("ENABLE_FLAGS").MustBool()
|
||||
}
|
||||
|
|
|
@@ -68,6 +68,7 @@ var Service = struct {
DefaultKeepEmailPrivate bool
DefaultAllowCreateOrganization bool
DefaultUserIsRestricted bool
AllowDotsInUsernames bool
EnableTimetracking bool
DefaultEnableTimetracking bool
DefaultEnableDependencies bool

@@ -180,6 +181,7 @@ func loadServiceFrom(rootCfg ConfigProvider) {
Service.DefaultKeepEmailPrivate = sec.Key("DEFAULT_KEEP_EMAIL_PRIVATE").MustBool()
Service.DefaultAllowCreateOrganization = sec.Key("DEFAULT_ALLOW_CREATE_ORGANIZATION").MustBool(true)
Service.DefaultUserIsRestricted = sec.Key("DEFAULT_USER_IS_RESTRICTED").MustBool(false)
Service.AllowDotsInUsernames = sec.Key("ALLOW_DOTS_IN_USERNAMES").MustBool(true)
Service.EnableTimetracking = sec.Key("ENABLE_TIMETRACKING").MustBool(true)
if Service.EnableTimetracking {
Service.DefaultEnableTimetracking = sec.Key("DEFAULT_ENABLE_TIMETRACKING").MustBool(true)

@@ -147,6 +147,7 @@ func loadCommonSettingsFrom(cfg ConfigProvider) error {
loadUIFrom(cfg)
loadAdminFrom(cfg)
loadAPIFrom(cfg)
loadBadgesFrom(cfg)
loadMetricsFrom(cfg)
loadCamoFrom(cfg)
loadI18nFrom(cfg)

@@ -402,6 +402,16 @@ func (p *PullRequestPayload) JSONPayload() ([]byte, error) {
return json.MarshalIndent(p, "", " ")
}

type HookScheduleAction string

const (
HookScheduleCreated HookScheduleAction = "schedule"
)

type SchedulePayload struct {
Action HookScheduleAction `json:"action"`
}

// ReviewPayload FIXME
type ReviewPayload struct {
Type string `json:"type"`

@@ -89,6 +89,9 @@ type CreatePullReviewComment struct {
NewLineNum int64 `json:"new_position"`
}

// CreatePullReviewCommentOptions are options to create a pull review comment
type CreatePullReviewCommentOptions CreatePullReviewComment

// SubmitPullReviewOptions are options to submit a pending pull review
type SubmitPullReviewOptions struct {
Event ReviewStateType `json:"event"`

@@ -88,6 +88,7 @@ type Repository struct {
ExternalTracker *ExternalTracker `json:"external_tracker,omitempty"`
HasWiki bool `json:"has_wiki"`
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
WikiBranch string `json:"wiki_branch,omitempty"`
HasPullRequests bool `json:"has_pull_requests"`
HasProjects bool `json:"has_projects"`
HasReleases bool `json:"has_releases"`

@@ -175,6 +176,8 @@ type EditRepoOption struct {
ExternalWiki *ExternalWiki `json:"external_wiki,omitempty"`
// sets the default branch for this repository.
DefaultBranch *string `json:"default_branch,omitempty"`
// sets the branch used for this repository's wiki.
WikiBranch *string `json:"wiki_branch,omitempty"`
// either `true` to allow pull requests, or `false` to prevent pull request.
HasPullRequests *bool `json:"has_pull_requests,omitempty"`
// either `true` to enable project unit, or `false` to disable them.

modules/structs/repo_flags.go (new file, 9 lines)

@@ -0,0 +1,9 @@
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
// SPDX-License-Identifier: MIT

package structs

// ReplaceFlagsOption options when replacing the flags of a repository
type ReplaceFlagsOption struct {
Flags []string `json:"flags"`
}

@@ -96,6 +96,9 @@ func NewFuncMap() template.FuncMap {
"AppDomain": func() string { // documented in mail-templates.md
return setting.Domain
},
"RepoFlagsEnabled": func() bool {
return setting.Repository.EnableFlags
},
"AssetVersion": func() string {
return setting.AssetVersion
},

@@ -55,13 +55,15 @@ func (lc *LogChecker) checkLogEvent(event *log.EventFormatted) {

var checkerIndex int64

func NewLogChecker(namePrefix string) (logChecker *LogChecker, cancel func()) {
func NewLogChecker(namePrefix string, level log.Level) (logChecker *LogChecker, cancel func()) {
logger := log.GetManager().GetLogger(namePrefix)
newCheckerIndex := atomic.AddInt64(&checkerIndex, 1)
writerName := namePrefix + "-" + fmt.Sprint(newCheckerIndex)

lc := &LogChecker{}
lc.EventWriterBaseImpl = log.NewEventWriterBase(writerName, "test-log-checker", log.WriterMode{})
lc.EventWriterBaseImpl = log.NewEventWriterBase(writerName, "test-log-checker", log.WriterMode{
Level: level,
})
logger.AddWriters(lc)
return lc, func() { _ = logger.RemoveWriter(writerName) }
}

@ -12,8 +12,8 @@ import (
|
|||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestLogChecker(t *testing.T) {
|
||||
lc, cleanup := NewLogChecker(log.DEFAULT)
|
||||
func TestLogCheckerInfo(t *testing.T) {
|
||||
lc, cleanup := NewLogChecker(log.DEFAULT, log.INFO)
|
||||
defer cleanup()
|
||||
|
||||
lc.Filter("First", "Third").StopMark("End")
|
||||
|
@ -24,11 +24,13 @@ func TestLogChecker(t *testing.T) {
|
|||
assert.False(t, stopped)
|
||||
|
||||
log.Info("First")
|
||||
log.Debug("Third")
|
||||
filtered, stopped = lc.Check(100 * time.Millisecond)
|
||||
assert.ElementsMatch(t, []bool{true, false}, filtered)
|
||||
assert.False(t, stopped)
|
||||
|
||||
log.Info("Second")
|
||||
log.Debug("Third")
|
||||
filtered, stopped = lc.Check(100 * time.Millisecond)
|
||||
assert.ElementsMatch(t, []bool{true, false}, filtered)
|
||||
assert.False(t, stopped)
|
||||
|
@ -43,3 +45,14 @@ func TestLogChecker(t *testing.T) {
|
|||
assert.ElementsMatch(t, []bool{true, true}, filtered)
|
||||
assert.True(t, stopped)
|
||||
}
|
||||
|
||||
func TestLogCheckerDebug(t *testing.T) {
|
||||
lc, cleanup := NewLogChecker(log.DEFAULT, log.DEBUG)
|
||||
defer cleanup()
|
||||
|
||||
lc.StopMark("End")
|
||||
|
||||
log.Debug("End")
|
||||
_, stopped := lc.Check(100 * time.Millisecond)
|
||||
assert.True(t, stopped)
|
||||
}
|
||||
|
|
|
@@ -7,10 +7,9 @@ import (
"crypto"
"crypto/rand"
"crypto/rsa"
"crypto/sha256"
"crypto/x509"
"encoding/pem"

"github.com/minio/sha256-simd"
)

// GenerateKeyPair generates a public and private keypair

@@ -7,12 +7,12 @@ import (
"crypto"
"crypto/rand"
"crypto/rsa"
"crypto/sha256"
"crypto/x509"
"encoding/pem"
"regexp"
"testing"

"github.com/minio/sha256-simd"
"github.com/stretchr/testify/assert"
)

@@ -117,13 +117,20 @@ func IsValidExternalTrackerURLFormat(uri string) bool {
}

var (
validUsernamePattern = regexp.MustCompile(`^[\da-zA-Z][-.\w]*$`)
invalidUsernamePattern = regexp.MustCompile(`[-._]{2,}|[-._]$`) // No consecutive or trailing non-alphanumeric chars
validUsernamePatternWithDots = regexp.MustCompile(`^[\da-zA-Z][-.\w]*$`)
validUsernamePatternWithoutDots = regexp.MustCompile(`^[\da-zA-Z][-\w]*$`)

// No consecutive or trailing non-alphanumeric chars, catches both cases
invalidUsernamePattern = regexp.MustCompile(`[-._]{2,}|[-._]$`)
)

// IsValidUsername checks if username is valid
func IsValidUsername(name string) bool {
// It is difficult to find a single pattern that is both readable and effective,
// but it's easier to use positive and negative checks.
return validUsernamePattern.MatchString(name) && !invalidUsernamePattern.MatchString(name)
if setting.Service.AllowDotsInUsernames {
return validUsernamePatternWithDots.MatchString(name) && !invalidUsernamePattern.MatchString(name)
}

return validUsernamePatternWithoutDots.MatchString(name) && !invalidUsernamePattern.MatchString(name)
}

@ -155,7 +155,8 @@ func Test_IsValidExternalTrackerURLFormat(t *testing.T) {
|
|||
}
|
||||
}
|
||||
|
||||
func TestIsValidUsername(t *testing.T) {
|
||||
func TestIsValidUsernameAllowDots(t *testing.T) {
|
||||
setting.Service.AllowDotsInUsernames = true
|
||||
tests := []struct {
|
||||
arg string
|
||||
want bool
|
||||
|
@ -185,3 +186,31 @@ func TestIsValidUsername(t *testing.T) {
|
|||
})
|
||||
}
|
||||
}
|
||||
|
||||
func TestIsValidUsernameBanDots(t *testing.T) {
|
||||
setting.Service.AllowDotsInUsernames = false
|
||||
defer func() {
|
||||
setting.Service.AllowDotsInUsernames = true
|
||||
}()
|
||||
|
||||
tests := []struct {
|
||||
arg string
|
||||
want bool
|
||||
}{
|
||||
{arg: "a", want: true},
|
||||
{arg: "abc", want: true},
|
||||
{arg: "0.b-c", want: false},
|
||||
{arg: "a.b-c_d", want: false},
|
||||
{arg: ".abc", want: false},
|
||||
{arg: "abc.", want: false},
|
||||
{arg: "a..bc", want: false},
|
||||
{arg: "a...bc", want: false},
|
||||
{arg: "a.-bc", want: false},
|
||||
{arg: "a._bc", want: false},
|
||||
}
|
||||
for _, tt := range tests {
|
||||
t.Run(tt.arg, func(t *testing.T) {
|
||||
assert.Equalf(t, tt.want, IsValidUsername(tt.arg), "IsValidUsername[AllowDotsInUsernames=false](%v)", tt.arg)
|
||||
})
|
||||
}
|
||||
}
|
||||
|
|
|
@@ -147,6 +147,16 @@ func toHandlerProvider(handler any) func(next http.Handler) http.Handler {
}
}

if hp, ok := handler.(func(next http.Handler) http.HandlerFunc); ok {
return func(next http.Handler) http.Handler {
h := hp(next) // this handle could be dynamically generated, so we can't use it for debug info
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
routing.UpdateFuncInfo(req.Context(), funcInfo)
h.ServeHTTP(resp, req)
})
}
}

provider := func(next http.Handler) http.Handler {
return http.HandlerFunc(func(respOrig http.ResponseWriter, req *http.Request) {
// wrap the response writer to check whether the response has been written

@@ -8,6 +8,7 @@ import (
"reflect"
"strings"

"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/translation"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/modules/validation"

@@ -135,7 +136,11 @@ func Validate(errs binding.Errors, data map[string]any, f Form, l translation.Lo
case validation.ErrRegexPattern:
data["ErrorMsg"] = trName + l.Tr("form.regex_pattern_error", errs[0].Message)
case validation.ErrUsername:
data["ErrorMsg"] = trName + l.Tr("form.username_error")
if setting.Service.AllowDotsInUsernames {
data["ErrorMsg"] = trName + l.Tr("form.username_error")
} else {
data["ErrorMsg"] = trName + l.Tr("form.username_error_no_dots")
}
case validation.ErrInvalidGroupTeamMap:
data["ErrorMsg"] = trName + l.Tr("form.invalid_group_team_map_error", errs[0].Message)
default:

@@ -53,6 +53,7 @@ func CommonTemplateContextData() ContextData {
"ShowMilestonesDashboardPage": setting.Service.ShowMilestonesDashboardPage,
"ShowFooterVersion": setting.Other.ShowFooterVersion,
"DisableDownloadSourceArchives": setting.Repository.DisableDownloadSourceArchives,
"DownloadOrCloneMethods": setting.Repository.DownloadOrCloneMethods,

"EnableSwagger": setting.API.EnableSwagger,
"EnableOpenIDSignIn": setting.Service.EnableOpenIDSignIn,
