.+?))?\\s"
- ]
- }
]
}
diff --git a/routers/api/actions/artifacts_chunks.go b/routers/api/actions/artifacts_chunks.go
index a15fa4fd1e..c0af750d7b 100644
--- a/routers/api/actions/artifacts_chunks.go
+++ b/routers/api/actions/artifacts_chunks.go
@@ -51,11 +51,11 @@ func saveUploadChunkBase(st storage.ObjectStorage, ctx *ArtifactContext,
log.Info("[artifact] check chunk md5, sum: %s, header: %s", chunkMd5String, reqMd5String)
// if md5 not match, delete the chunk
if reqMd5String != chunkMd5String {
- checkErr = fmt.Errorf("md5 not match")
+ checkErr = errors.New("md5 not match")
}
}
if writtenSize != contentSize {
- checkErr = errors.Join(checkErr, fmt.Errorf("contentSize not match body size"))
+ checkErr = errors.Join(checkErr, errors.New("contentSize not match body size"))
}
if checkErr != nil {
if err := st.Delete(storagePath); err != nil {
@@ -261,7 +261,7 @@ func mergeChunksForArtifact(ctx *ArtifactContext, chunks []*chunkFileItem, st st
return fmt.Errorf("save merged file error: %v", err)
}
if written != artifact.FileCompressedSize {
- return fmt.Errorf("merged file size is not equal to chunk length")
+ return errors.New("merged file size is not equal to chunk length")
}
defer func() {
diff --git a/routers/api/actions/artifactsv4.go b/routers/api/actions/artifactsv4.go
index c6bc39e7d2..dee5f1b2f3 100644
--- a/routers/api/actions/artifactsv4.go
+++ b/routers/api/actions/artifactsv4.go
@@ -166,8 +166,8 @@ func (r artifactV4Routes) buildSignature(endp, expires, artifactName string, tas
mac.Write([]byte(endp))
mac.Write([]byte(expires))
mac.Write([]byte(artifactName))
- mac.Write([]byte(fmt.Sprint(taskID)))
- mac.Write([]byte(fmt.Sprint(artifactID)))
+ fmt.Fprint(mac, taskID)
+ fmt.Fprint(mac, artifactID)
return mac.Sum(nil)
}
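
Aside: the buildSignature change above works because hash.Hash implements io.Writer, so the formatted task and artifact IDs can be written straight into the HMAC without building an intermediate string. A minimal standalone sketch (standard library only, not Forgejo code) of the equivalence:

    package main

    import (
    	"crypto/hmac"
    	"crypto/sha256"
    	"encoding/hex"
    	"fmt"
    )

    func main() {
    	key := []byte("secret")
    	taskID, artifactID := int64(42), int64(7)

    	// Old style: format to a string, convert to []byte, then write.
    	m1 := hmac.New(sha256.New, key)
    	m1.Write([]byte(fmt.Sprint(taskID)))
    	m1.Write([]byte(fmt.Sprint(artifactID)))

    	// New style: hash.Hash is an io.Writer, so fmt.Fprint writes into it directly.
    	m2 := hmac.New(sha256.New, key)
    	fmt.Fprint(m2, taskID)
    	fmt.Fprint(m2, artifactID)

    	// Both produce the same MAC.
    	fmt.Println(hex.EncodeToString(m1.Sum(nil)) == hex.EncodeToString(m2.Sum(nil)))
    }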
diff --git a/routers/api/actions/runner/runner.go b/routers/api/actions/runner/runner.go
index 8aae69f463..a971cd3fbf 100644
--- a/routers/api/actions/runner/runner.go
+++ b/routers/api/actions/runner/runner.go
@@ -178,7 +178,7 @@ func (s *Service) UpdateTask(
) (*connect.Response[runnerv1.UpdateTaskResponse], error) {
runner := GetRunner(ctx)
- task, err := actions_model.UpdateTaskByState(ctx, runner.ID, req.Msg.State)
+ task, err := actions_service.UpdateTaskByState(ctx, runner.ID, req.Msg.State)
if err != nil {
return nil, connect.NewError(connect.CodeInternal, fmt.Errorf("update task: %w", err))
}
diff --git a/routers/api/packages/api.go b/routers/api/packages/api.go
index ebd081ae77..79e61cf352 100644
--- a/routers/api/packages/api.go
+++ b/routers/api/packages/api.go
@@ -48,13 +48,14 @@ func reqPackageAccess(accessMode perm.AccessMode) func(ctx *context.Context) {
if ok { // it's a personal access token but not oauth2 token
scopeMatched := false
var err error
- if accessMode == perm.AccessModeRead {
+ switch accessMode {
+ case perm.AccessModeRead:
scopeMatched, err = scope.HasScope(auth_model.AccessTokenScopeReadPackage)
if err != nil {
ctx.Error(http.StatusInternalServerError, "HasScope", err.Error())
return
}
- } else if accessMode == perm.AccessModeWrite {
+ case perm.AccessModeWrite:
scopeMatched, err = scope.HasScope(auth_model.AccessTokenScopeWritePackage)
if err != nil {
ctx.Error(http.StatusInternalServerError, "HasScope", err.Error())
diff --git a/routers/api/packages/chef/auth.go b/routers/api/packages/chef/auth.go
index fc552c1f1b..7263cf13bb 100644
--- a/routers/api/packages/chef/auth.go
+++ b/routers/api/packages/chef/auth.go
@@ -12,6 +12,7 @@ import (
"crypto/x509"
"encoding/base64"
"encoding/pem"
+ "errors"
"fmt"
"hash"
"math/big"
@@ -121,7 +122,7 @@ func verifyTimestamp(req *http.Request) error {
}
if diff > maxTimeDifference {
- return fmt.Errorf("time difference")
+ return errors.New("time difference")
}
return nil
@@ -147,7 +148,7 @@ func getSignVersion(req *http.Request) (string, error) {
version := m[1]
m = algorithmPattern.FindStringSubmatch(hdr)
- if len(m) == 2 && m[1] != "sha1" && !(m[1] == "sha256" && version == "1.3") {
+ if len(m) == 2 && m[1] != "sha1" && (m[1] != "sha256" || version != "1.3") {
return "", util.NewInvalidArgumentErrorf("unsupported algorithm")
}
@@ -190,7 +191,7 @@ func getAuthorizationData(req *http.Request) ([]byte, error) {
tmp := make([]string, len(valueList))
for k, v := range valueList {
if k > len(tmp) {
- return nil, fmt.Errorf("invalid X-Ops-Authorization headers")
+ return nil, errors.New("invalid X-Ops-Authorization headers")
}
tmp[k-1] = v
}
@@ -267,7 +268,7 @@ func verifyDataOld(signature, data []byte, pub *rsa.PublicKey) error {
}
if !slices.Equal(out[skip:], data) {
- return fmt.Errorf("could not verify signature")
+ return errors.New("could not verify signature")
}
return nil
diff --git a/routers/api/packages/chef/chef.go b/routers/api/packages/chef/chef.go
index 909817861e..13419b9a95 100644
--- a/routers/api/packages/chef/chef.go
+++ b/routers/api/packages/chef/chef.go
@@ -139,7 +139,7 @@ func EnumeratePackages(ctx *context.Context) {
})
}
- skip, _ := opts.Paginator.GetSkipTake()
+ skip, _ := opts.GetSkipTake()
ctx.JSON(http.StatusOK, &Result{
Start: skip,
diff --git a/routers/api/packages/container/container.go b/routers/api/packages/container/container.go
index 5276dd5706..191a4aa455 100644
--- a/routers/api/packages/container/container.go
+++ b/routers/api/packages/container/container.go
@@ -4,6 +4,7 @@
package container
import (
+ "bytes"
"errors"
"fmt"
"io"
@@ -62,9 +63,6 @@ func setResponseHeaders(resp http.ResponseWriter, h *containerHeaders) {
if h.ContentType != "" {
resp.Header().Set("Content-Type", h.ContentType)
}
- if h.ContentLength != 0 {
- resp.Header().Set("Content-Length", strconv.FormatInt(h.ContentLength, 10))
- }
if h.UploadUUID != "" {
resp.Header().Set("Docker-Upload-Uuid", h.UploadUUID)
}
@@ -72,17 +70,29 @@ func setResponseHeaders(resp http.ResponseWriter, h *containerHeaders) {
resp.Header().Set("Docker-Content-Digest", h.ContentDigest)
resp.Header().Set("ETag", fmt.Sprintf(`"%s"`, h.ContentDigest))
}
+ if h.ContentLength >= 0 {
+ resp.Header().Set("Content-Length", strconv.FormatInt(h.ContentLength, 10))
+ }
resp.Header().Set("Docker-Distribution-Api-Version", "registry/2.0")
resp.WriteHeader(h.Status)
}
func jsonResponse(ctx *context.Context, status int, obj any) {
- setResponseHeaders(ctx.Resp, &containerHeaders{
- Status: status,
- ContentType: "application/json",
- })
- if err := json.NewEncoder(ctx.Resp).Encode(obj); err != nil {
+ // Buffer the JSON content first to calculate correct Content-Length
+ var buf bytes.Buffer
+ if err := json.NewEncoder(&buf).Encode(obj); err != nil {
log.Error("JSON encode: %v", err)
+ return
+ }
+
+ setResponseHeaders(ctx.Resp, &containerHeaders{
+ Status: status,
+ ContentType: "application/json",
+ ContentLength: int64(buf.Len()),
+ })
+
+ if _, err := buf.WriteTo(ctx.Resp); err != nil {
+ log.Error("JSON write: %v", err)
}
}
@@ -691,33 +701,30 @@ func DeleteManifest(ctx *context.Context) {
func serveBlob(ctx *context.Context, pfd *packages_model.PackageFileDescriptor) {
serveDirectReqParams := make(url.Values)
serveDirectReqParams.Set("response-content-type", pfd.Properties.GetByName(container_module.PropertyMediaType))
- s, u, _, err := packages_service.GetPackageBlobStream(ctx, pfd.File, pfd.Blob, serveDirectReqParams)
+ s, u, pf, err := packages_service.GetPackageBlobStream(ctx, pfd.File, pfd.Blob, serveDirectReqParams)
if err != nil {
+ if errors.Is(err, packages_model.ErrPackageFileNotExist) {
+ apiError(ctx, http.StatusNotFound, err)
+ return
+ }
apiError(ctx, http.StatusInternalServerError, err)
return
}
- headers := &containerHeaders{
- ContentDigest: pfd.Properties.GetByName(container_module.PropertyDigest),
- ContentType: pfd.Properties.GetByName(container_module.PropertyMediaType),
- ContentLength: pfd.Blob.Size,
- Status: http.StatusOK,
+ opts := &context.ServeHeaderOptions{
+ ContentType: pfd.Properties.GetByName(container_module.PropertyMediaType),
+ RedirectStatusCode: http.StatusTemporaryRedirect,
+ AdditionalHeaders: map[string][]string{
+ "Docker-Distribution-Api-Version": {"registry/2.0"},
+ },
}
- if u != nil {
- headers.Status = http.StatusTemporaryRedirect
- headers.Location = u.String()
-
- setResponseHeaders(ctx.Resp, headers)
- return
+ if d := pfd.Properties.GetByName(container_module.PropertyDigest); d != "" {
+ opts.AdditionalHeaders["Docker-Content-Digest"] = []string{d}
+ opts.AdditionalHeaders["ETag"] = []string{fmt.Sprintf(`"%s"`, d)}
}
- defer s.Close()
-
- setResponseHeaders(ctx.Resp, headers)
- if _, err := io.Copy(ctx.Resp, s); err != nil {
- log.Error("Error whilst copying content to response: %v", err)
- }
+ helper.ServePackageFile(ctx, s, u, pf, opts)
}
// https://github.com/opencontainers/distribution-spec/blob/main/spec.md#content-discovery
@@ -725,7 +732,7 @@ func GetTagList(ctx *context.Context) {
image := ctx.Params("image")
if _, err := packages_model.GetPackageByName(ctx, ctx.Package.Owner.ID, packages_model.TypeContainer, image); err != nil {
- if err == packages_model.ErrPackageNotExist {
+ if errors.Is(err, packages_model.ErrPackageNotExist) {
apiErrorDefined(ctx, errNameUnknown)
} else {
apiError(ctx, http.StatusInternalServerError, err)
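
Aside: the jsonResponse change above follows a buffer-then-write pattern: encode first so Content-Length reflects the real byte count (including the trailing newline Encode adds), then copy the buffer to the response. A minimal sketch with plain net/http (the handler and route are illustrative, not Forgejo's):

    package main

    import (
    	"bytes"
    	"encoding/json"
    	"log"
    	"net/http"
    	"strconv"
    )

    // writeJSON buffers the encoded body so the Content-Length header is exact,
    // even for empty or very small payloads.
    func writeJSON(w http.ResponseWriter, status int, obj any) {
    	var buf bytes.Buffer
    	if err := json.NewEncoder(&buf).Encode(obj); err != nil {
    		log.Printf("JSON encode: %v", err)
    		w.WriteHeader(http.StatusInternalServerError)
    		return
    	}

    	w.Header().Set("Content-Type", "application/json")
    	w.Header().Set("Content-Length", strconv.Itoa(buf.Len()))
    	w.WriteHeader(status)

    	if _, err := buf.WriteTo(w); err != nil {
    		log.Printf("JSON write: %v", err)
    	}
    }

    func main() {
    	http.HandleFunc("/v2/_catalog", func(w http.ResponseWriter, r *http.Request) {
    		writeJSON(w, http.StatusOK, map[string]any{"repositories": []string{}})
    	})
    	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
    }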
diff --git a/routers/api/packages/container/container_test.go b/routers/api/packages/container/container_test.go
new file mode 100644
index 0000000000..2ed38d846d
--- /dev/null
+++ b/routers/api/packages/container/container_test.go
@@ -0,0 +1,124 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package container
+
+import (
+ "net/http"
+ "net/http/httptest"
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestSetResponseHeaders(t *testing.T) {
+ t.Run("Content-Length for empty content", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusOK,
+ ContentLength: 0, // Empty blob
+ ContentDigest: "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ })
+
+ assert.Equal(t, "0", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", recorder.Header().Get("Docker-Content-Digest"))
+ assert.Equal(t, "registry/2.0", recorder.Header().Get("Docker-Distribution-Api-Version"))
+ assert.Equal(t, http.StatusOK, recorder.Code)
+ })
+
+ t.Run("Content-Length for non-empty content", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusOK,
+ ContentLength: 1024,
+ ContentDigest: "sha256:abcd1234",
+ })
+
+ assert.Equal(t, "1024", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:abcd1234", recorder.Header().Get("Docker-Content-Digest"))
+ })
+
+ t.Run("All headers set correctly", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusAccepted,
+ ContentLength: 512,
+ ContentDigest: "sha256:test123",
+ ContentType: "application/vnd.oci.image.manifest.v1+json",
+ Location: "/v2/test/repo/blobs/uploads/uuid123",
+ Range: "0-511",
+ UploadUUID: "uuid123",
+ })
+
+ assert.Equal(t, "512", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:test123", recorder.Header().Get("Docker-Content-Digest"))
+ assert.Equal(t, "application/vnd.oci.image.manifest.v1+json", recorder.Header().Get("Content-Type"))
+ assert.Equal(t, "/v2/test/repo/blobs/uploads/uuid123", recorder.Header().Get("Location"))
+ assert.Equal(t, "0-511", recorder.Header().Get("Range"))
+ assert.Equal(t, "uuid123", recorder.Header().Get("Docker-Upload-Uuid"))
+ assert.Equal(t, "registry/2.0", recorder.Header().Get("Docker-Distribution-Api-Version"))
+ assert.Equal(t, `"sha256:test123"`, recorder.Header().Get("ETag"))
+ assert.Equal(t, http.StatusAccepted, recorder.Code)
+ })
+}
+
+// TestResponseHeadersForEmptyBlobs tests the core fix for ORAS empty blob support
+func TestResponseHeadersForEmptyBlobs(t *testing.T) {
+ t.Run("Content-Length set for empty blob", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ // This tests the main fix: empty blobs should have Content-Length: 0
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusOK,
+ ContentLength: 0, // Empty blob (like empty config in ORAS artifacts)
+ ContentDigest: "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
+ })
+
+ // The key fix: Content-Length should be set even for 0-byte blobs
+ assert.Equal(t, "0", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855", recorder.Header().Get("Docker-Content-Digest"))
+ assert.Equal(t, "registry/2.0", recorder.Header().Get("Docker-Distribution-Api-Version"))
+ assert.Equal(t, `"sha256:e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855"`, recorder.Header().Get("ETag"))
+ assert.Equal(t, http.StatusOK, recorder.Code)
+ })
+
+ t.Run("Content-Length set for regular blob", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusOK,
+ ContentLength: 1024,
+ ContentDigest: "sha256:abcd1234",
+ })
+
+ assert.Equal(t, "1024", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:abcd1234", recorder.Header().Get("Docker-Content-Digest"))
+ })
+
+ t.Run("All headers set correctly", func(t *testing.T) {
+ recorder := httptest.NewRecorder()
+
+ setResponseHeaders(recorder, &containerHeaders{
+ Status: http.StatusAccepted,
+ ContentLength: 512,
+ ContentDigest: "sha256:test123",
+ ContentType: "application/vnd.oci.image.manifest.v1+json",
+ Location: "/v2/test/repo/blobs/uploads/uuid123",
+ Range: "0-511",
+ UploadUUID: "uuid123",
+ })
+
+ assert.Equal(t, "512", recorder.Header().Get("Content-Length"))
+ assert.Equal(t, "sha256:test123", recorder.Header().Get("Docker-Content-Digest"))
+ assert.Equal(t, "application/vnd.oci.image.manifest.v1+json", recorder.Header().Get("Content-Type"))
+ assert.Equal(t, "/v2/test/repo/blobs/uploads/uuid123", recorder.Header().Get("Location"))
+ assert.Equal(t, "0-511", recorder.Header().Get("Range"))
+ assert.Equal(t, "uuid123", recorder.Header().Get("Docker-Upload-Uuid"))
+ assert.Equal(t, "registry/2.0", recorder.Header().Get("Docker-Distribution-Api-Version"))
+ assert.Equal(t, `"sha256:test123"`, recorder.Header().Get("ETag"))
+ assert.Equal(t, http.StatusAccepted, recorder.Code)
+ })
+}
diff --git a/routers/api/packages/generic/generic.go b/routers/api/packages/generic/generic.go
index 6e116e050d..b84b902d2b 100644
--- a/routers/api/packages/generic/generic.go
+++ b/routers/api/packages/generic/generic.go
@@ -155,7 +155,7 @@ func DeletePackage(ctx *context.Context) {
},
)
if err != nil {
- if err == packages_model.ErrPackageNotExist {
+ if errors.Is(err, packages_model.ErrPackageNotExist) {
apiError(ctx, http.StatusNotFound, err)
return
}
@@ -182,7 +182,7 @@ func DeletePackageFile(ctx *context.Context) {
return pv, pf, nil
}()
if err != nil {
- if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist {
+ if errors.Is(err, packages_model.ErrPackageNotExist) || errors.Is(err, packages_model.ErrPackageFileNotExist) {
apiError(ctx, http.StatusNotFound, err)
return
}
diff --git a/routers/api/packages/helper/helper.go b/routers/api/packages/helper/helper.go
index 99c0867bbb..f9b91d9a09 100644
--- a/routers/api/packages/helper/helper.go
+++ b/routers/api/packages/helper/helper.go
@@ -39,16 +39,9 @@ func LogAndProcessError(ctx *context.Context, status int, obj any, cb func(strin
}
}
-// Serves the content of the package file
+// ServePackageFile serves the content of the package file
// If the url is set it will redirect the request, otherwise the content is copied to the response.
func ServePackageFile(ctx *context.Context, s io.ReadSeekCloser, u *url.URL, pf *packages_model.PackageFile, forceOpts ...*context.ServeHeaderOptions) {
- if u != nil {
- ctx.Redirect(u.String())
- return
- }
-
- defer s.Close()
-
var opts *context.ServeHeaderOptions
if len(forceOpts) > 0 {
opts = forceOpts[0]
@@ -59,5 +52,12 @@ func ServePackageFile(ctx *context.Context, s io.ReadSeekCloser, u *url.URL, pf
}
}
+ if u != nil {
+ ctx.Redirect(u.String(), opts.RedirectStatusCode)
+ return
+ }
+
+ defer s.Close()
+
ctx.ServeContent(s, opts)
}
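
Aside: the reordering above matters because the redirect status code now comes from the resolved ServeHeaderOptions (the container registry passes 307), so the options must be known before the u != nil branch runs, and the stream is only closed when it is actually served. A rough standalone sketch of that shape with plain net/http (serveOrRedirect and serveOptions are hypothetical names, not Forgejo's API):

    package main

    import (
    	"io"
    	"log"
    	"net/http"
    	"net/url"
    	"strings"
    )

    type serveOptions struct {
    	ContentType        string
    	RedirectStatusCode int
    }

    // serveOrRedirect resolves options first, then either redirects to u with the
    // caller-chosen status code or streams s to the client.
    func serveOrRedirect(w http.ResponseWriter, r *http.Request, s io.ReadCloser, u *url.URL, opts *serveOptions) {
    	if opts == nil {
    		opts = &serveOptions{RedirectStatusCode: http.StatusFound}
    	}

    	if u != nil {
    		// 307 preserves the request method, which matters for registry clients.
    		http.Redirect(w, r, u.String(), opts.RedirectStatusCode)
    		return
    	}

    	defer s.Close()
    	if opts.ContentType != "" {
    		w.Header().Set("Content-Type", opts.ContentType)
    	}
    	if _, err := io.Copy(w, s); err != nil {
    		log.Printf("copy: %v", err)
    	}
    }

    func main() {
    	http.HandleFunc("/blob", func(w http.ResponseWriter, r *http.Request) {
    		blob := io.NopCloser(strings.NewReader("payload"))
    		serveOrRedirect(w, r, blob, nil, &serveOptions{
    			ContentType:        "application/octet-stream",
    			RedirectStatusCode: http.StatusTemporaryRedirect,
    		})
    	})
    	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
    }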
diff --git a/routers/api/packages/maven/maven.go b/routers/api/packages/maven/maven.go
index ea04a7b42e..30737f91dd 100644
--- a/routers/api/packages/maven/maven.go
+++ b/routers/api/packages/maven/maven.go
@@ -11,6 +11,7 @@ import (
"encoding/hex"
"encoding/xml"
"errors"
+ "fmt"
"io"
"net/http"
"path/filepath"
@@ -61,6 +62,12 @@ func apiError(ctx *context.Context, status int, obj any) {
})
}
+// buildPackageID creates a package ID from group and artifact ID
+// Refer to https://maven.apache.org/pom.html#Maven_Coordinates
+func buildPackageID(groupID, artifactID string) string {
+ return fmt.Sprintf("%s:%s", groupID, artifactID)
+}
+
// DownloadPackageFile serves the content of a package
func DownloadPackageFile(ctx *context.Context) {
handlePackageFile(ctx, true)
@@ -88,7 +95,7 @@ func handlePackageFile(ctx *context.Context, serveContent bool) {
func serveMavenMetadata(ctx *context.Context, params parameters) {
// /com/foo/project/maven-metadata.xml[.md5/.sha1/.sha256/.sha512]
- packageName := params.GroupID + "-" + params.ArtifactID
+ packageName := buildPackageID(params.GroupID, params.ArtifactID)
pvs, err := packages_model.GetVersionsByPackageName(ctx, ctx.Package.Owner.ID, packages_model.TypeMaven, packageName)
if err != nil {
apiError(ctx, http.StatusInternalServerError, err)
@@ -119,8 +126,8 @@ func serveMavenMetadata(ctx *context.Context, params parameters) {
latest := pds[len(pds)-1]
// http.TimeFormat required a UTC time, refer to https://pkg.go.dev/net/http#TimeFormat
- lastModifed := latest.Version.CreatedUnix.AsTime().UTC().Format(http.TimeFormat)
- ctx.Resp.Header().Set("Last-Modified", lastModifed)
+ lastModified := latest.Version.CreatedUnix.AsTime().UTC().Format(http.TimeFormat)
+ ctx.Resp.Header().Set("Last-Modified", lastModified)
ext := strings.ToLower(filepath.Ext(params.Filename))
if isChecksumExtension(ext) {
@@ -150,7 +157,7 @@ func serveMavenMetadata(ctx *context.Context, params parameters) {
}
func servePackageFile(ctx *context.Context, params parameters, serveContent bool) {
- packageName := params.GroupID + "-" + params.ArtifactID
+ packageName := buildPackageID(params.GroupID, params.ArtifactID)
pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeMaven, packageName, params.Version)
if err != nil {
@@ -169,9 +176,9 @@ func servePackageFile(ctx *context.Context, params parameters, serveContent bool
filename = filename[:len(filename)-len(ext)]
}
- pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, filename, packages_model.EmptyFileKey)
+ pf, err := packages_model.GetFileForVersionByNameMatchCase(ctx, pv.ID, filename, packages_model.EmptyFileKey)
if err != nil {
- if err == packages_model.ErrPackageFileNotExist {
+ if errors.Is(err, packages_model.ErrPackageFileNotExist) {
apiError(ctx, http.StatusNotFound, err)
} else {
apiError(ctx, http.StatusInternalServerError, err)
@@ -247,7 +254,7 @@ func UploadPackageFile(ctx *context.Context) {
return
}
- packageName := params.GroupID + "-" + params.ArtifactID
+ packageName := buildPackageID(params.GroupID, params.ArtifactID)
mavenUploadLock.CheckIn(packageName)
defer mavenUploadLock.CheckOut(packageName)
@@ -283,9 +290,9 @@ func UploadPackageFile(ctx *context.Context) {
apiError(ctx, http.StatusInternalServerError, err)
return
}
- pf, err := packages_model.GetFileForVersionByName(ctx, pv.ID, params.Filename[:len(params.Filename)-len(ext)], packages_model.EmptyFileKey)
+ pf, err := packages_model.GetFileForVersionByNameMatchCase(ctx, pv.ID, params.Filename[:len(params.Filename)-len(ext)], packages_model.EmptyFileKey)
if err != nil {
- if err == packages_model.ErrPackageFileNotExist {
+ if errors.Is(err, packages_model.ErrPackageFileNotExist) {
apiError(ctx, http.StatusNotFound, err)
return
}
@@ -339,7 +346,7 @@ func UploadPackageFile(ctx *context.Context) {
if pvci.Metadata != nil {
pv, err := packages_model.GetVersionByNameAndVersion(ctx, pvci.Owner.ID, pvci.PackageType, pvci.Name, pvci.Version)
- if err != nil && err != packages_model.ErrPackageNotExist {
+ if err != nil && !errors.Is(err, packages_model.ErrPackageNotExist) {
apiError(ctx, http.StatusInternalServerError, err)
return
}
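
Aside: a quick standalone illustration (not Forgejo code) of what buildPackageID produces — the internal package name now uses the standard Maven coordinate separator, groupId:artifactId, instead of the previous hyphen concatenation:

    package main

    import "fmt"

    // buildPackageID mirrors the helper above: Maven coordinates identify a
    // package as groupId:artifactId.
    func buildPackageID(groupID, artifactID string) string {
    	return fmt.Sprintf("%s:%s", groupID, artifactID)
    }

    func main() {
    	fmt.Println(buildPackageID("com.example", "my-library")) // com.example:my-library
    }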
diff --git a/routers/api/packages/nuget/auth.go b/routers/api/packages/nuget/auth.go
index bab08bb1b8..92868bdef5 100644
--- a/routers/api/packages/nuget/auth.go
+++ b/routers/api/packages/nuget/auth.go
@@ -25,7 +25,7 @@ func (a *Auth) Name() string {
func (a *Auth) Verify(req *http.Request, w http.ResponseWriter, store auth.DataStore, sess auth.SessionStore) (*user_model.User, error) {
token, err := auth_model.GetAccessTokenBySHA(req.Context(), req.Header.Get("X-NuGet-ApiKey"))
if err != nil {
- if !(auth_model.IsErrAccessTokenNotExist(err) || auth_model.IsErrAccessTokenEmpty(err)) {
+ if !auth_model.IsErrAccessTokenNotExist(err) && !auth_model.IsErrAccessTokenEmpty(err) {
log.Error("GetAccessTokenBySHA: %v", err)
return nil, err
}
diff --git a/routers/api/packages/swift/swift.go b/routers/api/packages/swift/swift.go
index 304230a85e..a65bd31cd9 100644
--- a/routers/api/packages/swift/swift.go
+++ b/routers/api/packages/swift/swift.go
@@ -290,7 +290,24 @@ func DownloadManifest(ctx *context.Context) {
})
}
-// https://github.com/swiftlang/swift-package-manager/blob/main/Documentation/PackageRegistry/Registry.md#endpoint-6
+// formFileOptionalReadCloser returns (nil, nil) if the formKey is not present.
+func formFileOptionalReadCloser(ctx *context.Context, formKey string) (io.ReadCloser, error) {
+ multipartFile, _, err := ctx.Req.FormFile(formKey)
+ if err != nil && !errors.Is(err, http.ErrMissingFile) {
+ return nil, err
+ }
+ if multipartFile != nil {
+ return multipartFile, nil
+ }
+
+ content := ctx.Req.FormValue(formKey)
+ if content == "" {
+ return nil, nil
+ }
+ return io.NopCloser(strings.NewReader(content)), nil
+}
+
+// UploadPackageFile refers to https://github.com/swiftlang/swift-package-manager/blob/main/Documentation/PackageRegistry/Registry.md#endpoint-6
func UploadPackageFile(ctx *context.Context) {
packageScope := ctx.Params("scope")
packageName := ctx.Params("name")
@@ -304,9 +321,9 @@ func UploadPackageFile(ctx *context.Context) {
packageVersion := v.Core().String()
- file, _, err := ctx.Req.FormFile("source-archive")
- if err != nil {
- apiError(ctx, http.StatusBadRequest, err)
+ file, err := formFileOptionalReadCloser(ctx, "source-archive")
+ if file == nil || err != nil {
+ apiError(ctx, http.StatusBadRequest, "unable to read source-archive file")
return
}
defer file.Close()
@@ -318,10 +335,13 @@ func UploadPackageFile(ctx *context.Context) {
}
defer buf.Close()
- var mr io.Reader
- metadata := ctx.Req.FormValue("metadata")
- if metadata != "" {
- mr = strings.NewReader(metadata)
+ mr, err := formFileOptionalReadCloser(ctx, "metadata")
+ if err != nil {
+ apiError(ctx, http.StatusBadRequest, "unable to read metadata file")
+ return
+ }
+ if mr != nil {
+ defer mr.Close()
}
pck, err := swift_module.ParsePackage(buf, buf.Size(), mr)
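
Aside: a self-contained sketch of the pattern formFileOptionalReadCloser introduces, using *http.Request directly instead of Forgejo's context (the handler and field names are illustrative): a form field may arrive as a multipart file part or as a plain value, and a completely absent field maps to (nil, nil) so the caller decides whether it is required.

    package main

    import (
    	"errors"
    	"fmt"
    	"io"
    	"log"
    	"net/http"
    	"strings"
    )

    // optionalFormReadCloser returns (nil, nil) when the field is not present at all.
    func optionalFormReadCloser(r *http.Request, key string) (io.ReadCloser, error) {
    	f, _, err := r.FormFile(key)
    	if err != nil && !errors.Is(err, http.ErrMissingFile) {
    		return nil, err
    	}
    	if f != nil {
    		return f, nil // sent as a file part
    	}
    	if v := r.FormValue(key); v != "" {
    		return io.NopCloser(strings.NewReader(v)), nil // sent as a plain form value
    	}
    	return nil, nil
    }

    func main() {
    	http.HandleFunc("/upload", func(w http.ResponseWriter, r *http.Request) {
    		meta, err := optionalFormReadCloser(r, "metadata")
    		if err != nil {
    			http.Error(w, "unable to read metadata file", http.StatusBadRequest)
    			return
    		}
    		if meta == nil {
    			fmt.Fprintln(w, "no metadata supplied") // optional field: continue without it
    			return
    		}
    		defer meta.Close()
    		b, _ := io.ReadAll(meta)
    		fmt.Fprintf(w, "metadata: %d bytes\n", len(b))
    	})
    	log.Fatal(http.ListenAndServe("127.0.0.1:8080", nil))
    }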
diff --git a/routers/api/shared/middleware.go b/routers/api/shared/middleware.go
index f56acbe1bf..7d537f1ef9 100644
--- a/routers/api/shared/middleware.go
+++ b/routers/api/shared/middleware.go
@@ -30,7 +30,6 @@ func Middlewares() (stack []any) {
return append(stack,
context.APIContexter(),
- checkDeprecatedAuthMethods,
// Get user from session if logged in.
apiAuth(buildAuthGroup()),
verifyAuthWithOptions(&common.VerifyOptions{
@@ -127,13 +126,6 @@ func verifyAuthWithOptions(options *common.VerifyOptions) func(ctx *context.APIC
}
}
-// check for and warn against deprecated authentication options
-func checkDeprecatedAuthMethods(ctx *context.APIContext) {
- if ctx.FormString("token") != "" || ctx.FormString("access_token") != "" {
- ctx.Resp.Header().Set("Warning", "token and access_token API authentication is deprecated and will be removed in gitea 1.23. Please use AuthorizationHeaderToken instead. Existing queries will continue to work but without authorization.")
- }
-}
-
func securityHeaders() func(http.Handler) http.Handler {
return func(next http.Handler) http.Handler {
return http.HandlerFunc(func(resp http.ResponseWriter, req *http.Request) {
diff --git a/routers/api/v1/activitypub/actor.go b/routers/api/v1/activitypub/actor.go
index 7568a2a7c8..e49f277842 100644
--- a/routers/api/v1/activitypub/actor.go
+++ b/routers/api/v1/activitypub/actor.go
@@ -28,7 +28,7 @@ func Actor(ctx *context.APIContext) {
// "200":
// "$ref": "#/responses/ActivityPub"
- link := user_model.APActorUserAPActorID()
+ link := user_model.APServerActorID()
actor := ap.ActorNew(ap.IRI(link), ap.ApplicationType)
actor.PreferredUsername = ap.NaturalLanguageValuesNew()
@@ -46,7 +46,7 @@ func Actor(ctx *context.APIContext) {
actor.PublicKey.ID = ap.IRI(link + "#main-key")
actor.PublicKey.Owner = ap.IRI(link)
- publicKeyPem, err := activitypub.GetPublicKey(ctx, user_model.NewAPActorUser())
+ publicKeyPem, err := activitypub.GetPublicKey(ctx, user_model.NewAPServerActor())
if err != nil {
ctx.ServerError("GetPublicKey", err)
return
diff --git a/routers/api/v1/activitypub/reqsignature.go b/routers/api/v1/activitypub/reqsignature.go
index a9bb4bd868..b84fbe05fa 100644
--- a/routers/api/v1/activitypub/reqsignature.go
+++ b/routers/api/v1/activitypub/reqsignature.go
@@ -6,59 +6,134 @@ package activitypub
import (
"crypto"
"crypto/x509"
+ "database/sql"
"encoding/pem"
+ "errors"
"fmt"
- "io"
"net/http"
"net/url"
+ "forgejo.org/models/db"
+ "forgejo.org/models/forgefed"
+ "forgejo.org/models/user"
"forgejo.org/modules/activitypub"
- "forgejo.org/modules/httplib"
+ fm "forgejo.org/modules/forgefed"
"forgejo.org/modules/log"
"forgejo.org/modules/setting"
gitea_context "forgejo.org/services/context"
+ "forgejo.org/services/federation"
"github.com/42wim/httpsig"
ap "github.com/go-ap/activitypub"
)
-func getPublicKeyFromResponse(b []byte, keyID *url.URL) (p crypto.PublicKey, err error) {
- person := ap.PersonNew(ap.IRI(keyID.String()))
- err = person.UnmarshalJSON(b)
- if err != nil {
- return nil, fmt.Errorf("ActivityStreams type cannot be converted to one known to have publicKey property: %w", err)
- }
- pubKey := person.PublicKey
- if pubKey.ID.String() != keyID.String() {
- return nil, fmt.Errorf("cannot find publicKey with id: %s in %s", keyID, string(b))
- }
- pubKeyPem := pubKey.PublicKeyPem
+func decodePublicKeyPem(pubKeyPem string) ([]byte, error) {
block, _ := pem.Decode([]byte(pubKeyPem))
if block == nil || block.Type != "PUBLIC KEY" {
- return nil, fmt.Errorf("could not decode publicKeyPem to PUBLIC KEY pem block type")
+ return nil, errors.New("could not decode publicKeyPem to PUBLIC KEY pem block type")
}
- p, err = x509.ParsePKIXPublicKey(block.Bytes)
- return p, err
+
+ return block.Bytes, nil
}
-func fetch(iri *url.URL) (b []byte, err error) {
- req := httplib.NewRequest(iri.String(), http.MethodGet)
- req.Header("Accept", activitypub.ActivityStreamsContentType)
- req.Header("User-Agent", "Gitea/"+setting.AppVer)
- resp, err := req.Response()
+func getFederatedUser(ctx *gitea_context.APIContext, person *ap.Person, federationHost *forgefed.FederationHost) (*user.FederatedUser, error) {
+ personID, err := fm.NewPersonID(person.ID.String(), string(federationHost.NodeInfo.SoftwareName))
if err != nil {
return nil, err
}
- defer resp.Body.Close()
-
- if resp.StatusCode != http.StatusOK {
- return nil, fmt.Errorf("url IRI fetch [%s] failed with status (%d): %s", iri, resp.StatusCode, resp.Status)
+ _, federatedUser, err := user.FindFederatedUser(ctx, personID.ID, federationHost.ID)
+ if err != nil {
+ return nil, err
}
- b, err = io.ReadAll(io.LimitReader(resp.Body, setting.Federation.MaxSize))
- return b, err
+
+ if federatedUser != nil {
+ return federatedUser, nil
+ }
+
+ _, newFederatedUser, err := federation.CreateUserFromAP(ctx, personID, federationHost.ID)
+ if err != nil {
+ return nil, err
+ }
+
+ return newFederatedUser, nil
+}
+
+func storePublicKey(ctx *gitea_context.APIContext, person *ap.Person, pubKeyBytes []byte) error {
+ federationHost, err := federation.GetFederationHostForURI(ctx, person.ID.String())
+ if err != nil {
+ return err
+ }
+
+ if person.Type == ap.ActivityVocabularyType("Application") {
+ federationHost.KeyID = sql.NullString{
+ String: person.PublicKey.ID.String(),
+ Valid: true,
+ }
+
+ federationHost.PublicKey = sql.Null[sql.RawBytes]{
+ V: pubKeyBytes,
+ Valid: true,
+ }
+
+ _, err = db.GetEngine(ctx).ID(federationHost.ID).Update(federationHost)
+ if err != nil {
+ return err
+ }
+ } else if person.Type == ap.ActivityVocabularyType("Person") {
+ federatedUser, err := getFederatedUser(ctx, person, federationHost)
+ if err != nil {
+ return err
+ }
+
+ federatedUser.KeyID = sql.NullString{
+ String: person.PublicKey.ID.String(),
+ Valid: true,
+ }
+
+ federatedUser.PublicKey = sql.Null[sql.RawBytes]{
+ V: pubKeyBytes,
+ Valid: true,
+ }
+
+ _, err = db.GetEngine(ctx).ID(federatedUser.ID).Update(federatedUser)
+ if err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+func getPublicKeyFromResponse(b []byte, keyID *url.URL) (person *ap.Person, pubKeyBytes []byte, p crypto.PublicKey, err error) {
+ person = ap.PersonNew(ap.IRI(keyID.String()))
+ err = person.UnmarshalJSON(b)
+ if err != nil {
+ return nil, nil, nil, fmt.Errorf("ActivityStreams type cannot be converted to one known to have publicKey property: %w", err)
+ }
+
+ pubKey := person.PublicKey
+ if pubKey.ID.String() != keyID.String() {
+ return nil, nil, nil, fmt.Errorf("cannot find publicKey with id: %s in %s", keyID, string(b))
+ }
+
+ pubKeyBytes, err = decodePublicKeyPem(pubKey.PublicKeyPem)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ p, err = x509.ParsePKIXPublicKey(pubKeyBytes)
+ if err != nil {
+ return nil, nil, nil, err
+ }
+
+ return person, pubKeyBytes, p, err
}
func verifyHTTPSignatures(ctx *gitea_context.APIContext) (authenticated bool, err error) {
+ if !setting.Federation.SignatureEnforced {
+ return true, nil
+ }
+
r := ctx.Req
// 1. Figure out what key we need to verify
@@ -66,23 +141,78 @@ func verifyHTTPSignatures(ctx *gitea_context.APIContext) (authenticated bool, er
if err != nil {
return false, err
}
+
ID := v.KeyId()
idIRI, err := url.Parse(ID)
if err != nil {
return false, err
}
+
+ signatureAlgorithm := httpsig.Algorithm(setting.Federation.SignatureAlgorithms[0])
+
// 2. Fetch the public key of the other actor
- b, err := fetch(idIRI)
+ // Try if the signing actor is an already known federated user
+ _, federationUser, err := user.FindFederatedUserByKeyID(ctx, idIRI.String())
if err != nil {
return false, err
}
- pubKey, err := getPublicKeyFromResponse(b, idIRI)
+
+ if federationUser != nil && federationUser.PublicKey.Valid {
+ pubKey, err := x509.ParsePKIXPublicKey(federationUser.PublicKey.V)
+ if err != nil {
+ return false, err
+ }
+
+ authenticated = v.Verify(pubKey, signatureAlgorithm) == nil
+ return authenticated, err
+ }
+
+ // Try if the signing actor is an already known federation host
+ federationHost, err := forgefed.FindFederationHostByKeyID(ctx, idIRI.String())
if err != nil {
return false, err
}
- // 3. Verify the other actor's key
- algo := httpsig.Algorithm(setting.Federation.Algorithms[0])
- authenticated = v.Verify(pubKey, algo) == nil
+
+ if federationHost != nil && federationHost.PublicKey.Valid {
+ pubKey, err := x509.ParsePKIXPublicKey(federationHost.PublicKey.V)
+ if err != nil {
+ return false, err
+ }
+
+ authenticated = v.Verify(pubKey, signatureAlgorithm) == nil
+ return authenticated, err
+ }
+
+ // Fetch missing public key
+ actionsUser := user.NewAPServerActor()
+ clientFactory, err := activitypub.GetClientFactory(ctx)
+ if err != nil {
+ return false, err
+ }
+
+ apClient, err := clientFactory.WithKeys(ctx, actionsUser, actionsUser.APActorKeyID())
+ if err != nil {
+ return false, err
+ }
+
+ b, err := apClient.GetBody(idIRI.String())
+ if err != nil {
+ return false, err
+ }
+
+ person, pubKeyBytes, pubKey, err := getPublicKeyFromResponse(b, idIRI)
+ if err != nil {
+ return false, err
+ }
+
+ authenticated = v.Verify(pubKey, signatureAlgorithm) == nil
+ if authenticated {
+ err = storePublicKey(ctx, person, pubKeyBytes)
+ if err != nil {
+ return false, err
+ }
+ }
+
return authenticated, err
}
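
Aside: for reference, a minimal standard-library sketch (throwaway RSA key, not Forgejo code) of the key handling used above — decodePublicKeyPem strips the PEM armor down to DER bytes, which is what gets stored on the federated user or federation host row, and x509.ParsePKIXPublicKey turns those bytes back into a crypto.PublicKey when a signature needs to be verified:

    package main

    import (
    	"crypto/rand"
    	"crypto/rsa"
    	"crypto/x509"
    	"encoding/pem"
    	"errors"
    	"fmt"
    	"log"
    )

    func decodePublicKeyPem(pubKeyPem string) ([]byte, error) {
    	block, _ := pem.Decode([]byte(pubKeyPem))
    	if block == nil || block.Type != "PUBLIC KEY" {
    		return nil, errors.New("could not decode publicKeyPem to PUBLIC KEY pem block type")
    	}
    	return block.Bytes, nil
    }

    func main() {
    	// Generate a throwaway key and round-trip it: PEM -> DER -> crypto.PublicKey.
    	priv, err := rsa.GenerateKey(rand.Reader, 2048)
    	if err != nil {
    		log.Fatal(err)
    	}
    	der, err := x509.MarshalPKIXPublicKey(&priv.PublicKey)
    	if err != nil {
    		log.Fatal(err)
    	}
    	pemStr := string(pem.EncodeToMemory(&pem.Block{Type: "PUBLIC KEY", Bytes: der}))

    	raw, err := decodePublicKeyPem(pemStr) // DER bytes, suitable for storage
    	if err != nil {
    		log.Fatal(err)
    	}
    	pub, err := x509.ParsePKIXPublicKey(raw) // parse on demand when verifying a signature
    	if err != nil {
    		log.Fatal(err)
    	}
    	fmt.Printf("parsed %T\n", pub)
    }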
diff --git a/routers/api/v1/admin/quota_rule.go b/routers/api/v1/admin/quota_rule.go
index ea188107fa..c2bc6843e4 100644
--- a/routers/api/v1/admin/quota_rule.go
+++ b/routers/api/v1/admin/quota_rule.go
@@ -4,7 +4,7 @@
package admin
import (
- "fmt"
+ "errors"
"net/http"
quota_model "forgejo.org/models/quota"
@@ -83,7 +83,7 @@ func CreateQuotaRule(ctx *context.APIContext) {
form := web.GetForm(ctx).(*api.CreateQuotaRuleOptions)
if form.Limit == nil {
- ctx.Error(http.StatusUnprocessableEntity, "quota_model.ParseLimitSubject", fmt.Errorf("[Limit]: Required"))
+ ctx.Error(http.StatusUnprocessableEntity, "quota_model.ParseLimitSubject", errors.New("[Limit]: Required"))
return
}
diff --git a/routers/api/v1/admin/user.go b/routers/api/v1/admin/user.go
index 32e1dd414d..8aa67b3b0a 100644
--- a/routers/api/v1/admin/user.go
+++ b/routers/api/v1/admin/user.go
@@ -140,7 +140,6 @@ func CreateUser(ctx *context.APIContext) {
user_model.IsErrEmailAlreadyUsed(err) ||
db.IsErrNameReserved(err) ||
db.IsErrNameCharsNotAllowed(err) ||
- validation.IsErrEmailCharIsNotSupported(err) ||
validation.IsErrEmailInvalid(err) ||
db.IsErrNamePatternNotAllowed(err) {
ctx.Error(http.StatusUnprocessableEntity, "", err)
@@ -197,7 +196,7 @@ func EditUser(ctx *context.APIContext) {
// If either LoginSource or LoginName is given, the other must be present too.
if form.SourceID != nil || form.LoginName != nil {
if form.SourceID == nil || form.LoginName == nil {
- ctx.Error(http.StatusUnprocessableEntity, "LoginSourceAndLoginName", fmt.Errorf("source_id and login_name must be specified together"))
+ ctx.Error(http.StatusUnprocessableEntity, "LoginSourceAndLoginName", errors.New("source_id and login_name must be specified together"))
return
}
}
@@ -226,7 +225,7 @@ func EditUser(ctx *context.APIContext) {
if form.Email != nil {
if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, ctx.ContextUser, *form.Email); err != nil {
switch {
- case validation.IsErrEmailCharIsNotSupported(err), validation.IsErrEmailInvalid(err):
+ case validation.IsErrEmailInvalid(err):
ctx.Error(http.StatusBadRequest, "EmailInvalid", err)
case user_model.IsErrEmailAlreadyUsed(err):
ctx.Error(http.StatusBadRequest, "EmailUsed", err)
@@ -305,7 +304,7 @@ func DeleteUser(ctx *context.APIContext) {
// admin should not delete themself
if ctx.ContextUser.ID == ctx.Doer.ID {
- ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("you cannot delete yourself"))
+ ctx.Error(http.StatusUnprocessableEntity, "", errors.New("you cannot delete yourself"))
return
}
diff --git a/routers/api/v1/api.go b/routers/api/v1/api.go
index 2eb1ee4229..bf08bdd249 100644
--- a/routers/api/v1/api.go
+++ b/routers/api/v1/api.go
@@ -22,8 +22,6 @@
//
// Security:
// - BasicAuth :
-// - Token :
-// - AccessToken :
// - AuthorizationHeaderToken :
// - SudoParam :
// - SudoHeader :
@@ -32,16 +30,6 @@
// SecurityDefinitions:
// BasicAuth:
// type: basic
-// Token:
-// type: apiKey
-// name: token
-// in: query
-// description: This authentication option is deprecated for removal in Gitea 1.23. Please use AuthorizationHeaderToken instead.
-// AccessToken:
-// type: apiKey
-// name: access_token
-// in: query
-// description: This authentication option is deprecated for removal in Gitea 1.23. Please use AuthorizationHeaderToken instead.
// AuthorizationHeaderToken:
// type: apiKey
// name: Authorization
@@ -203,19 +191,19 @@ func repoAssignment() func(ctx *context.APIContext) {
}
if task.IsForkPullRequest {
- ctx.Repo.Permission.AccessMode = perm.AccessModeRead
+ ctx.Repo.AccessMode = perm.AccessModeRead
} else {
- ctx.Repo.Permission.AccessMode = perm.AccessModeWrite
+ ctx.Repo.AccessMode = perm.AccessModeWrite
}
if err := ctx.Repo.Repository.LoadUnits(ctx); err != nil {
ctx.Error(http.StatusInternalServerError, "LoadUnits", err)
return
}
- ctx.Repo.Permission.Units = ctx.Repo.Repository.Units
- ctx.Repo.Permission.UnitsMode = make(map[unit.Type]perm.AccessMode)
+ ctx.Repo.Units = ctx.Repo.Repository.Units
+ ctx.Repo.UnitsMode = make(map[unit.Type]perm.AccessMode)
for _, u := range ctx.Repo.Repository.Units {
- ctx.Repo.Permission.UnitsMode[u.Type] = ctx.Repo.Permission.AccessMode
+ ctx.Repo.UnitsMode[u.Type] = ctx.Repo.AccessMode
}
} else {
ctx.Repo.Permission, err = access_model.GetUserRepoPermission(ctx, repo, ctx.Doer)
@@ -692,7 +680,7 @@ func mustEnableIssues(ctx *context.APIContext) {
}
func mustAllowPulls(ctx *context.APIContext) {
- if !(ctx.Repo.Repository.CanEnablePulls() && ctx.Repo.CanRead(unit.TypePullRequests)) {
+ if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
if ctx.Repo.Repository.CanEnablePulls() && log.IsTrace() {
if ctx.IsSigned {
log.Trace("Permission Denied: User %-v cannot read %-v in Repo %-v\n"+
@@ -716,7 +704,7 @@ func mustAllowPulls(ctx *context.APIContext) {
func mustEnableIssuesOrPulls(ctx *context.APIContext) {
if !ctx.Repo.CanRead(unit.TypeIssues) &&
- !(ctx.Repo.Repository.CanEnablePulls() && ctx.Repo.CanRead(unit.TypePullRequests)) {
+ (!ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests)) {
if ctx.Repo.Repository.CanEnablePulls() && log.IsTrace() {
if ctx.IsSigned {
log.Trace("Permission Denied: User %-v cannot read %-v and %-v in Repo %-v\n"+
@@ -777,13 +765,13 @@ func bind[T any](_ T) any {
func individualPermsChecker(ctx *context.APIContext) {
// org permissions have been checked in context.OrgAssignment(), but individual permissions haven't been checked.
if ctx.ContextUser.IsIndividual() {
- switch {
- case ctx.ContextUser.Visibility == api.VisibleTypePrivate:
+ switch ctx.ContextUser.Visibility {
+ case api.VisibleTypePrivate:
if ctx.Doer == nil || (ctx.ContextUser.ID != ctx.Doer.ID && !ctx.Doer.IsAdmin) {
ctx.NotFound("Visit Project", nil)
return
}
- case ctx.ContextUser.Visibility == api.VisibleTypeLimited:
+ case api.VisibleTypeLimited:
if ctx.Doer == nil {
ctx.NotFound("Visit Project", nil)
return
@@ -840,22 +828,22 @@ func Routes() *web.Route {
m.Group("/activitypub", func() {
// deprecated, remove in 1.20, use /user-id/{user-id} instead
m.Group("/user/{username}", func() {
- m.Get("", activitypub.Person)
+ m.Get("", activitypub.ReqHTTPSignature(), activitypub.Person)
m.Post("/inbox", activitypub.ReqHTTPSignature(), activitypub.PersonInbox)
}, context.UserAssignmentAPI(), checkTokenPublicOnly())
m.Group("/user-id/{user-id}", func() {
- m.Get("", activitypub.Person)
+ m.Get("", activitypub.ReqHTTPSignature(), activitypub.Person)
m.Post("/inbox", activitypub.ReqHTTPSignature(), activitypub.PersonInbox)
}, context.UserIDAssignmentAPI(), checkTokenPublicOnly())
m.Group("/actor", func() {
m.Get("", activitypub.Actor)
- m.Post("/inbox", activitypub.ActorInbox)
+ m.Post("/inbox", activitypub.ReqHTTPSignature(), activitypub.ActorInbox)
})
m.Group("/repository-id/{repository-id}", func() {
- m.Get("", activitypub.Repository)
+ m.Get("", activitypub.ReqHTTPSignature(), activitypub.Repository)
m.Post("/inbox",
bind(forgefed.ForgeLike{}),
- // TODO: activitypub.ReqHTTPSignature(),
+ activitypub.ReqHTTPSignature(),
activitypub.RepositoryInbox)
}, context.RepositoryIDAssignmentAPI())
}, tokenRequiresScopes(auth_model.AccessTokenScopeCategoryActivityPub))
@@ -865,6 +853,7 @@ func Routes() *web.Route {
m.Group("", func() {
m.Get("/version", misc.Version)
m.Get("/signing-key.gpg", misc.SigningKey)
+ m.Get("/signing-key.ssh", misc.SSHSigningKey)
m.Post("/markup", reqToken(), bind(api.MarkupOption{}), misc.Markup)
m.Post("/markdown", reqToken(), bind(api.MarkdownOption{}), misc.Markdown)
m.Post("/markdown/raw", reqToken(), misc.MarkdownRaw)
@@ -1183,6 +1172,10 @@ func Routes() *web.Route {
}, reqToken(), reqAdmin())
m.Group("/actions", func() {
m.Get("/tasks", repo.ListActionTasks)
+ m.Group("/runs", func() {
+ m.Get("", repo.ListActionRuns)
+ m.Get("/{run_id}", repo.GetActionRun)
+ })
m.Group("/workflows", func() {
m.Group("/{workflowname}", func() {
@@ -1317,6 +1310,7 @@ func Routes() *web.Route {
m.Get("/refs", repo.GetGitAllRefs)
m.Get("/refs/*", repo.GetGitRefs)
m.Get("/trees/{sha}", repo.GetTree)
+ m.Get("/blobs", repo.GetBlobs)
m.Get("/blobs/{sha}", repo.GetBlob)
m.Get("/tags/{sha}", repo.GetAnnotatedTag)
m.Group("/notes/{sha}", func() {
@@ -1355,6 +1349,12 @@ func Routes() *web.Route {
m.Post("", bind(api.UpdateRepoAvatarOption{}), repo.UpdateAvatar)
m.Delete("", repo.DeleteAvatar)
}, reqAdmin(), reqToken())
+ m.Group("/sync_fork", func() {
+ m.Get("", reqRepoReader(unit.TypeCode), repo.SyncForkDefaultInfo)
+ m.Post("", mustNotBeArchived, reqRepoWriter(unit.TypeCode), repo.SyncForkDefault)
+ m.Get("/{branch}", reqRepoReader(unit.TypeCode), repo.SyncForkBranchInfo)
+ m.Post("/{branch}", mustNotBeArchived, reqRepoWriter(unit.TypeCode), repo.SyncForkBranch)
+ })
m.Get("/{ball_type:tarball|zipball|bundle}/*", reqRepoReader(unit.TypeCode), repo.DownloadArchive)
}, repoAssignment(), checkTokenPublicOnly())
@@ -1488,16 +1488,16 @@ func Routes() *web.Route {
m.Group("/{type}/{name}", func() {
m.Group("/{version}", func() {
m.Get("", packages.GetPackage)
- m.Delete("", reqPackageAccess(perm.AccessModeWrite), packages.DeletePackage)
+ m.Delete("", reqToken(), reqPackageAccess(perm.AccessModeWrite), packages.DeletePackage)
m.Get("/files", packages.ListPackageFiles)
})
- m.Post("/-/link/{repo_name}", reqPackageAccess(perm.AccessModeWrite), packages.LinkPackage)
- m.Post("/-/unlink", reqPackageAccess(perm.AccessModeWrite), packages.UnlinkPackage)
+ m.Post("/-/link/{repo_name}", reqToken(), reqPackageAccess(perm.AccessModeWrite), packages.LinkPackage)
+ m.Post("/-/unlink", reqToken(), reqPackageAccess(perm.AccessModeWrite), packages.UnlinkPackage)
})
m.Get("/", packages.ListPackages)
- }, reqToken(), tokenRequiresScopes(auth_model.AccessTokenScopeCategoryPackage), context.UserAssignmentAPI(), context.PackageAssignmentAPI(), reqPackageAccess(perm.AccessModeRead), checkTokenPublicOnly())
+ }, tokenRequiresScopes(auth_model.AccessTokenScopeCategoryPackage), context.UserAssignmentAPI(), context.PackageAssignmentAPI(), reqPackageAccess(perm.AccessModeRead), checkTokenPublicOnly())
// Organizations
m.Get("/user/orgs", reqToken(), tokenRequiresScopes(auth_model.AccessTokenScopeCategoryUser, auth_model.AccessTokenScopeCategoryOrganization), org.ListMyOrgs)
diff --git a/routers/api/v1/misc/signing.go b/routers/api/v1/misc/signing.go
index 945df6068f..9f829b8443 100644
--- a/routers/api/v1/misc/signing.go
+++ b/routers/api/v1/misc/signing.go
@@ -7,8 +7,11 @@ import (
"fmt"
"net/http"
+ "forgejo.org/modules/setting"
asymkey_service "forgejo.org/services/asymkey"
"forgejo.org/services/context"
+
+ "golang.org/x/crypto/ssh"
)
// SigningKey returns the public key of the default signing key if it exists
@@ -61,3 +64,29 @@ func SigningKey(ctx *context.APIContext) {
ctx.Error(http.StatusInternalServerError, "gpg export", fmt.Errorf("Error writing key content %w", err))
}
}
+
+// SSHSigningKey returns the public SSH key of the default signing key if it exists
+func SSHSigningKey(ctx *context.APIContext) {
+ // swagger:operation GET /signing-key.ssh miscellaneous getSSHSigningKey
+ // ---
+ // summary: Get default signing-key.ssh
+ // produces:
+ // - text/plain
+ // responses:
+ // "200":
+ // description: "SSH public key in OpenSSH authorized key format"
+ // schema:
+ // type: string
+ // "404":
+ // "$ref": "#/responses/notFound"
+
+ if setting.SSHInstanceKey == nil {
+ ctx.NotFound()
+ return
+ }
+
+ _, err := ctx.Write(ssh.MarshalAuthorizedKey(setting.SSHInstanceKey))
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "ssh export", err)
+ }
+}
diff --git a/routers/api/v1/org/org.go b/routers/api/v1/org/org.go
index fcf2c6b412..87bc27be63 100644
--- a/routers/api/v1/org/org.go
+++ b/routers/api/v1/org/org.go
@@ -398,7 +398,7 @@ func Edit(ctx *context.APIContext) {
ctx.Org.Organization.Email = ""
} else {
if err := user_service.ReplacePrimaryEmailAddress(ctx, ctx.Org.Organization.AsUser(), *form.Email); err != nil {
- if validation.IsErrEmailInvalid(err) || validation.IsErrEmailCharIsNotSupported(err) {
+ if validation.IsErrEmailInvalid(err) {
ctx.Error(http.StatusUnprocessableEntity, "ReplacePrimaryEmailAddress", err)
} else {
ctx.Error(http.StatusInternalServerError, "ReplacePrimaryEmailAddress", err)
diff --git a/routers/api/v1/repo/action.go b/routers/api/v1/repo/action.go
index a39d4836e1..dbc4933de6 100644
--- a/routers/api/v1/repo/action.go
+++ b/routers/api/v1/repo/action.go
@@ -5,6 +5,7 @@ package repo
import (
"errors"
+ "fmt"
"net/http"
actions_model "forgejo.org/models/actions"
@@ -694,3 +695,161 @@ func DispatchWorkflow(ctx *context.APIContext) {
ctx.JSON(http.StatusNoContent, nil)
}
}
+
+// ListActionRuns return a filtered list of ActionRun
+func ListActionRuns(ctx *context.APIContext) {
+ // swagger:operation GET /repos/{owner}/{repo}/actions/runs repository ListActionRuns
+ // ---
+ // summary: List a repository's action runs
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: page
+ // in: query
+ // description: page number of results to return (1-based)
+ // type: integer
+ // - name: limit
+ // in: query
+ // description: page size of results, default maximum page size is 50
+ // type: integer
+ // - name: event
+ // in: query
+ // description: Returns workflow runs triggered by the specified events. For example, `push`, `pull_request` or `workflow_dispatch`.
+ // type: array
+ // items:
+ // type: string
+ // - name: status
+ // in: query
+ // description: |
+ // Returns workflow runs with the check run status or conclusion that is specified. For example, a conclusion can be success or a status can be in_progress. Only Forgejo Actions can set a status of waiting, pending, or requested.
+ // type: array
+ // items:
+ // type: string
+ // enum: [unknown, waiting, running, success, failure, cancelled, skipped, blocked]
+ // - name: run_number
+ // in: query
+ // description: |
+ // Returns the workflow run associated with the run number.
+ // type: integer
+ // format: int64
+ // - name: head_sha
+ // in: query
+ // description: Only returns workflow runs that are associated with the specified head_sha.
+ // type: string
+ // responses:
+ // "200":
+ // "$ref": "#/responses/ActionRunList"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "403":
+ // "$ref": "#/responses/forbidden"
+
+ statusStrs := ctx.FormStrings("status")
+ statuses := make([]actions_model.Status, len(statusStrs))
+ for i, s := range statusStrs {
+ if status, exists := actions_model.StatusFromString(s); exists {
+ statuses[i] = status
+ } else {
+ ctx.Error(http.StatusBadRequest, "StatusFromString", fmt.Sprintf("unknown status: %s", s))
+ return
+ }
+ }
+
+ runs, total, err := db.FindAndCount[actions_model.ActionRun](ctx, &actions_model.FindRunJobOptions{
+ ListOptions: utils.GetListOptions(ctx),
+ OwnerID: ctx.Repo.Owner.ID,
+ RepoID: ctx.Repo.Repository.ID,
+ Events: ctx.FormStrings("event"),
+ Statuses: statuses,
+ RunNumber: ctx.FormInt64("run_number"),
+ CommitSHA: ctx.FormString("head_sha"),
+ })
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "ListActionRuns", err)
+ return
+ }
+
+ res := new(api.ListActionRunResponse)
+ res.TotalCount = total
+
+ res.Entries = make([]*api.ActionRun, len(runs))
+ for i, r := range runs {
+ if err := r.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, "LoadAttributes", err)
+ return
+ }
+ cr := convert.ToActionRun(ctx, r, ctx.Doer)
+ res.Entries[i] = cr
+ }
+
+ ctx.JSON(http.StatusOK, res)
+}
+
+// GetActionRun get one action instance
+func GetActionRun(ctx *context.APIContext) {
+ // swagger:operation GET /repos/{owner}/{repo}/actions/runs/{run_id} repository ActionRun
+ // ---
+ // summary: Get an action run
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: run_id
+ // in: path
+ // description: id of the action run
+ // type: integer
+ // format: int64
+ // required: true
+ // responses:
+ // "200":
+ // "$ref": "#/responses/ActionRun"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "403":
+ // "$ref": "#/responses/forbidden"
+ // "404":
+ // "$ref": "#/responses/notFound"
+
+ run, err := actions_model.GetRunByID(ctx, ctx.ParamsInt64(":run_id"))
+ if err != nil {
+ if errors.Is(err, util.ErrNotExist) {
+ ctx.Error(http.StatusNotFound, "GetRunById", err)
+ } else {
+ ctx.Error(http.StatusInternalServerError, "GetRunByID", err)
+ }
+ return
+ }
+
+ // Action runs live in their own table, therefore we check that the
+ // run with the requested ID is owned by the repository
+ if ctx.Repo.Repository.ID != run.RepoID {
+ ctx.Error(http.StatusNotFound, "GetRunById", util.ErrNotExist)
+ return
+ }
+
+ if err := run.LoadAttributes(ctx); err != nil {
+ ctx.Error(http.StatusInternalServerError, "LoadAttributes", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, convert.ToActionRun(ctx, run, ctx.Doer))
+}
diff --git a/routers/api/v1/repo/blob.go b/routers/api/v1/repo/blob.go
index 8ed57d4787..63baec2025 100644
--- a/routers/api/v1/repo/blob.go
+++ b/routers/api/v1/repo/blob.go
@@ -5,11 +5,54 @@ package repo
import (
"net/http"
+ "strings"
"forgejo.org/services/context"
files_service "forgejo.org/services/repository/files"
)
+// GetBlobs gets multiple blobs of a repository.
+func GetBlobs(ctx *context.APIContext) {
+ // swagger:operation GET /repos/{owner}/{repo}/git/blobs repository GetBlobs
+ // ---
+ // summary: Gets multiple blobs of a repository.
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: shas
+ // in: query
+ // description: a comma-separated list of blob SHAs (mind the overall URL-length limit of ~2,083 chars)
+ // type: string
+ // required: true
+ // responses:
+ // "200":
+ // "$ref": "#/responses/GitBlobList"
+ // "400":
+ // "$ref": "#/responses/error"
+
+ shas := ctx.FormString("shas")
+ if len(shas) == 0 {
+ ctx.Error(http.StatusBadRequest, "", "shas not provided")
+ return
+ }
+
+ if blobs, err := files_service.GetBlobsBySHA(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, strings.Split(shas, ",")); err != nil {
+ ctx.Error(http.StatusBadRequest, "", err)
+ } else {
+ ctx.JSON(http.StatusOK, blobs)
+ }
+}
+
// GetBlob get the blob of a repository file.
func GetBlob(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/git/blobs/{sha} repository GetBlob
@@ -30,12 +73,12 @@ func GetBlob(ctx *context.APIContext) {
// required: true
// - name: sha
// in: path
- // description: sha of the commit
+ // description: sha of the blob to retrieve
// type: string
// required: true
// responses:
// "200":
- // "$ref": "#/responses/GitBlobResponse"
+ // "$ref": "#/responses/GitBlob"
// "400":
// "$ref": "#/responses/error"
// "404":
diff --git a/routers/api/v1/repo/branch.go b/routers/api/v1/repo/branch.go
index 6263360a8e..7c9593d625 100644
--- a/routers/api/v1/repo/branch.go
+++ b/routers/api/v1/repo/branch.go
@@ -6,7 +6,6 @@ package repo
import (
"errors"
- "fmt"
"net/http"
"forgejo.org/models"
@@ -151,7 +150,7 @@ func DeleteBranch(ctx *context.APIContext) {
}
if ctx.Repo.Repository.IsMirror {
- ctx.Error(http.StatusForbidden, "IsMirrored", fmt.Errorf("can not delete branch of an mirror repository"))
+ ctx.Error(http.StatusForbidden, "IsMirrored", errors.New("can not delete branch of an mirror repository"))
return
}
@@ -160,9 +159,9 @@ func DeleteBranch(ctx *context.APIContext) {
case git.IsErrBranchNotExist(err):
ctx.NotFound(err)
case errors.Is(err, repo_service.ErrBranchIsDefault):
- ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("can not delete default branch"))
+ ctx.Error(http.StatusForbidden, "DefaultBranch", errors.New("can not delete default branch"))
case errors.Is(err, git_model.ErrBranchIsProtected):
- ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("branch protected"))
+ ctx.Error(http.StatusForbidden, "IsProtectedBranch", errors.New("branch protected"))
default:
ctx.Error(http.StatusInternalServerError, "DeleteBranch", err)
}
@@ -231,9 +230,9 @@ func CreateBranch(ctx *context.APIContext) {
ctx.Error(http.StatusInternalServerError, "GetCommit", err)
return
}
- } else if len(opt.OldBranchName) > 0 { //nolint
- if ctx.Repo.GitRepo.IsBranchExist(opt.OldBranchName) { //nolint
- oldCommit, err = ctx.Repo.GitRepo.GetBranchCommit(opt.OldBranchName) //nolint
+ } else if len(opt.OldBranchName) > 0 { //nolint:staticcheck
+ if ctx.Repo.GitRepo.IsBranchExist(opt.OldBranchName) { //nolint:staticcheck
+ oldCommit, err = ctx.Repo.GitRepo.GetBranchCommit(opt.OldBranchName) //nolint:staticcheck
if err != nil {
ctx.Error(http.StatusInternalServerError, "GetBranchCommit", err)
return
diff --git a/routers/api/v1/repo/compare.go b/routers/api/v1/repo/compare.go
index 9c941ea07f..7fc59ea171 100644
--- a/routers/api/v1/repo/compare.go
+++ b/routers/api/v1/repo/compare.go
@@ -64,7 +64,7 @@ func CompareDiff(ctx *context.APIContext) {
}
}
- _, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{
+ headRepository, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{
Base: infos[0],
Head: infos[1],
})
@@ -80,7 +80,7 @@ func CompareDiff(ctx *context.APIContext) {
apiFiles := []*api.CommitAffectedFiles{}
userCache := make(map[string]*user_model.User)
for i := 0; i < len(ci.Commits); i++ {
- apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, ci.Commits[i], userCache,
+ apiCommit, err := convert.ToCommit(ctx, headRepository, headGitRepo, ci.Commits[i], userCache,
convert.ToCommitOptions{
Stat: true,
Verification: verification,
diff --git a/routers/api/v1/repo/file.go b/routers/api/v1/repo/file.go
index fb71d76388..549fe9fae0 100644
--- a/routers/api/v1/repo/file.go
+++ b/routers/api/v1/repo/file.go
@@ -437,7 +437,7 @@ func canWriteFiles(ctx *context.APIContext, branch string) bool {
// canReadFiles returns true if repository is readable and user has proper access level.
func canReadFiles(r *context.Repository) bool {
- return r.Permission.CanRead(unit.TypeCode)
+ return r.CanRead(unit.TypeCode)
}
func base64Reader(s string) (io.ReadSeeker, error) {
@@ -480,6 +480,8 @@ func ChangeFiles(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
+ // "409":
+ // "$ref": "#/responses/conflict"
// "413":
// "$ref": "#/responses/quotaExceeded"
// "422":
@@ -584,6 +586,8 @@ func CreateFile(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
+ // "409":
+ // "$ref": "#/responses/conflict"
// "413":
// "$ref": "#/responses/quotaExceeded"
// "422":
@@ -684,6 +688,8 @@ func UpdateFile(ctx *context.APIContext) {
// "$ref": "#/responses/error"
// "404":
// "$ref": "#/responses/notFound"
+ // "409":
+ // "$ref": "#/responses/conflict"
// "413":
// "$ref": "#/responses/quotaExceeded"
// "422":
@@ -692,7 +698,7 @@ func UpdateFile(ctx *context.APIContext) {
// "$ref": "#/responses/repoArchivedError"
apiOpts := web.GetForm(ctx).(*api.UpdateFileOptions)
if ctx.Repo.Repository.IsEmpty {
- ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", fmt.Errorf("repo is empty"))
+ ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", errors.New("repo is empty"))
return
}
@@ -757,11 +763,19 @@ func handleCreateOrUpdateFileError(ctx *context.APIContext, err error) {
ctx.Error(http.StatusForbidden, "Access", err)
return
}
- if git_model.IsErrBranchAlreadyExists(err) || models.IsErrFilenameInvalid(err) || models.IsErrSHADoesNotMatch(err) ||
- models.IsErrFilePathInvalid(err) || models.IsErrRepoFileAlreadyExists(err) {
+ if git_model.IsErrBranchAlreadyExists(err) ||
+ models.IsErrFilenameInvalid(err) ||
+ models.IsErrSHAOrCommitIDNotProvided(err) ||
+ models.IsErrFilePathInvalid(err) ||
+ models.IsErrRepoFileAlreadyExists(err) {
ctx.Error(http.StatusUnprocessableEntity, "Invalid", err)
return
}
+ if models.IsErrCommitIDDoesNotMatch(err) ||
+ models.IsErrSHADoesNotMatch(err) {
+ ctx.Error(http.StatusConflict, "Conflict", err)
+ return
+ }
if git_model.IsErrBranchNotExist(err) || git.IsErrBranchNotExist(err) {
ctx.Error(http.StatusNotFound, "BranchDoesNotExist", err)
return
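For illustration only (not part of the patch): with SHA and commit-ID mismatches now mapped to 409 instead of 422, an API client can treat the two statuses differently. A minimal Go sketch of that distinction, using nothing beyond the status codes introduced above:

package main

import (
	"fmt"
	"net/http"
)

// classifyUpdateFileStatus sketches how a caller of the contents API could
// react to the revised error mapping: 409 signals a stale sha/commit id that
// can be resolved by refetching the file metadata and retrying, while 422
// still means the request itself is malformed.
func classifyUpdateFileStatus(code int) string {
	switch code {
	case http.StatusConflict: // 409 - sha or commit id does not match
		return "stale sha: refetch the file and retry"
	case http.StatusUnprocessableEntity: // 422 - invalid request
		return "invalid request: fix the payload"
	default:
		return fmt.Sprintf("unhandled status %d", code)
	}
}

func main() {
	fmt.Println(classifyUpdateFileStatus(http.StatusConflict))
}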
diff --git a/routers/api/v1/repo/hook_test.go b/routers/api/v1/repo/hook_test.go
index 77c86388f5..52d2245f03 100644
--- a/routers/api/v1/repo/hook_test.go
+++ b/routers/api/v1/repo/hook_test.go
@@ -25,7 +25,7 @@ func TestTestHook(t *testing.T) {
defer ctx.Repo.GitRepo.Close()
contexttest.LoadRepoCommit(t, ctx)
TestHook(ctx)
- assert.EqualValues(t, http.StatusNoContent, ctx.Resp.Status())
+ assert.Equal(t, http.StatusNoContent, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &webhook.HookTask{
HookID: 1,
diff --git a/routers/api/v1/repo/issue.go b/routers/api/v1/repo/issue.go
index 5495c4a6ba..442e109843 100644
--- a/routers/api/v1/repo/issue.go
+++ b/routers/api/v1/repo/issue.go
@@ -121,6 +121,12 @@ func SearchIssues(ctx *context.APIContext) {
// description: Number of items per page
// type: integer
// minimum: 0
+ // - name: sort
+ // in: query
+ // description: Type of sort
+ // type: string
+ // enum: [relevance, latest, oldest, recentupdate, leastupdate, mostcomment, leastcomment, nearduedate, farduedate]
+ // default: latest
// responses:
// "200":
// "$ref": "#/responses/IssueList"
@@ -276,7 +282,7 @@ func SearchIssues(ctx *context.APIContext) {
IsClosed: isClosed,
IncludedAnyLabelIDs: includedAnyLabels,
MilestoneIDs: includedMilestones,
- SortBy: issue_indexer.SortByCreatedDesc,
+ SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
}
if since != 0 {
@@ -305,9 +311,10 @@ func SearchIssues(ctx *context.APIContext) {
}
}
- // FIXME: It's unsupported to sort by priority repo when searching by indexer,
- // it's indeed an regression, but I think it is worth to support filtering by indexer first.
- _ = ctx.FormInt64("priority_repo_id")
+ priorityRepoID := ctx.FormInt64("priority_repo_id")
+ if priorityRepoID > 0 {
+ searchOpt.PriorityRepoID = optional.Some(priorityRepoID)
+ }
ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
if err != nil {
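As a sketch of the pattern behind issue_indexer.ParseSortBy (not the real implementation, which covers every value listed in the swagger enum above): map the query value onto a known sort order and fall back to the supplied default when it is empty or unrecognized.

package main

import "fmt"

type sortBy string

const (
	sortByCreatedDesc sortBy = "created_desc"
	sortByUpdatedDesc sortBy = "updated_desc"
)

// parseSortBy returns the sort order matching the query value, or the
// fallback when the value is unknown, mirroring how the handler keeps
// SortByCreatedDesc as its default.
func parseSortBy(value string, fallback sortBy) sortBy {
	switch value {
	case "latest":
		return sortByCreatedDesc
	case "recentupdate":
		return sortByUpdatedDesc
	default:
		return fallback
	}
}

func main() {
	fmt.Println(parseSortBy("recentupdate", sortByCreatedDesc)) // updated_desc
	fmt.Println(parseSortBy("", sortByCreatedDesc))             // created_desc
}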
diff --git a/routers/api/v1/repo/issue_dependency.go b/routers/api/v1/repo/issue_dependency.go
index bed1e7ecf9..7bf1d3c67c 100644
--- a/routers/api/v1/repo/issue_dependency.go
+++ b/routers/api/v1/repo/issue_dependency.go
@@ -72,7 +72,7 @@ func GetIssueDependencies(ctx *context.APIContext) {
}
// 1. We must be able to read this issue
- if !ctx.Repo.Permission.CanReadIssuesOrPulls(issue.IsPull) {
+ if !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull) {
ctx.NotFound()
return
}
@@ -88,7 +88,7 @@ func GetIssueDependencies(ctx *context.APIContext) {
limit = setting.API.MaxResponseItems
}
- canWrite := ctx.Repo.Permission.CanWriteIssuesOrPulls(issue.IsPull)
+ canWrite := ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)
blockerIssues := make([]*issues_model.Issue, 0, limit)
@@ -123,7 +123,7 @@ func GetIssueDependencies(ctx *context.APIContext) {
}
// check permission
- if !perm.CanReadIssuesOrPulls(blocker.Issue.IsPull) {
+ if !perm.CanReadIssuesOrPulls(blocker.IsPull) {
if !canWrite {
hiddenBlocker := &issues_model.DependencyInfo{
Issue: issues_model.Issue{
@@ -134,19 +134,19 @@ func GetIssueDependencies(ctx *context.APIContext) {
} else {
confidentialBlocker := &issues_model.DependencyInfo{
Issue: issues_model.Issue{
- RepoID: blocker.Issue.RepoID,
+ RepoID: blocker.RepoID,
Index: blocker.Index,
Title: blocker.Title,
IsClosed: blocker.IsClosed,
IsPull: blocker.IsPull,
},
Repository: repo_model.Repository{
- ID: blocker.Issue.Repo.ID,
- Name: blocker.Issue.Repo.Name,
- OwnerName: blocker.Issue.Repo.OwnerName,
+ ID: blocker.Repo.ID,
+ Name: blocker.Repo.Name,
+ OwnerName: blocker.Repo.OwnerName,
},
}
- confidentialBlocker.Issue.Repo = &confidentialBlocker.Repository
+ confidentialBlocker.Repo = &confidentialBlocker.Repository
blocker = confidentialBlocker
}
}
@@ -323,7 +323,7 @@ func GetIssueBlocks(ctx *context.APIContext) {
return
}
- if !ctx.Repo.Permission.CanReadIssuesOrPulls(issue.IsPull) {
+ if !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull) {
ctx.NotFound()
return
}
@@ -373,11 +373,11 @@ func GetIssueBlocks(ctx *context.APIContext) {
repoPerms[depMeta.RepoID] = perm
}
- if !perm.CanReadIssuesOrPulls(depMeta.Issue.IsPull) {
+ if !perm.CanReadIssuesOrPulls(depMeta.IsPull) {
continue
}
- depMeta.Issue.Repo = &depMeta.Repository
+ depMeta.Repo = &depMeta.Repository
issues = append(issues, &depMeta.Issue)
}
diff --git a/routers/api/v1/repo/issue_label.go b/routers/api/v1/repo/issue_label.go
index 85af1149ff..3b2935305c 100644
--- a/routers/api/v1/repo/issue_label.go
+++ b/routers/api/v1/repo/issue_label.go
@@ -5,7 +5,7 @@
package repo
import (
- "fmt"
+ "errors"
"net/http"
"reflect"
@@ -352,12 +352,12 @@ func prepareForReplaceOrAdd(ctx *context.APIContext, form api.IssueLabelsOption)
labelNames = append(labelNames, rv.String())
default:
ctx.Error(http.StatusBadRequest, "InvalidLabel", "a label must be an integer or a string")
- return nil, nil, fmt.Errorf("invalid label")
+ return nil, nil, errors.New("invalid label")
}
}
if len(labelIDs) > 0 && len(labelNames) > 0 {
ctx.Error(http.StatusBadRequest, "InvalidLabels", "labels should be an array of strings or integers")
- return nil, nil, fmt.Errorf("invalid labels")
+ return nil, nil, errors.New("invalid labels")
}
if len(labelNames) > 0 {
repoLabelIDs, err := issues_model.GetLabelIDsInRepoByNames(ctx, ctx.Repo.Repository.ID, labelNames)
diff --git a/routers/api/v1/repo/issue_tracked_time.go b/routers/api/v1/repo/issue_tracked_time.go
index 7d88b1b2cd..61875b577c 100644
--- a/routers/api/v1/repo/issue_tracked_time.go
+++ b/routers/api/v1/repo/issue_tracked_time.go
@@ -4,6 +4,7 @@
package repo
import (
+ "errors"
"fmt"
"net/http"
"time"
@@ -116,7 +117,7 @@ func ListTrackedTimes(ctx *context.APIContext) {
if opts.UserID == 0 {
opts.UserID = ctx.Doer.ID
} else {
- ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights"))
+ ctx.Error(http.StatusForbidden, "", errors.New("query by user not allowed; not enough rights"))
return
}
}
@@ -437,7 +438,7 @@ func ListTrackedTimesByUser(ctx *context.APIContext) {
}
if !ctx.IsUserRepoAdmin() && !ctx.Doer.IsAdmin && ctx.Doer.ID != user.ID {
- ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights"))
+ ctx.Error(http.StatusForbidden, "", errors.New("query by user not allowed; not enough rights"))
return
}
@@ -545,7 +546,7 @@ func ListTrackedTimesByRepository(ctx *context.APIContext) {
if opts.UserID == 0 {
opts.UserID = ctx.Doer.ID
} else {
- ctx.Error(http.StatusForbidden, "", fmt.Errorf("query by user not allowed; not enough rights"))
+ ctx.Error(http.StatusForbidden, "", errors.New("query by user not allowed; not enough rights"))
return
}
}
diff --git a/routers/api/v1/repo/migrate.go b/routers/api/v1/repo/migrate.go
index 75e772dfe2..a848a950db 100644
--- a/routers/api/v1/repo/migrate.go
+++ b/routers/api/v1/repo/migrate.go
@@ -123,12 +123,12 @@ func Migrate(ctx *context.APIContext) {
gitServiceType := convert.ToGitServiceType(form.Service)
if form.Mirror && setting.Mirror.DisableNewPull {
- ctx.Error(http.StatusForbidden, "MirrorsGlobalDisabled", fmt.Errorf("the site administrator has disabled the creation of new pull mirrors"))
+ ctx.Error(http.StatusForbidden, "MirrorsGlobalDisabled", errors.New("the site administrator has disabled the creation of new pull mirrors"))
return
}
if setting.Repository.DisableMigrations {
- ctx.Error(http.StatusForbidden, "MigrationsGlobalDisabled", fmt.Errorf("the site administrator has disabled migrations"))
+ ctx.Error(http.StatusForbidden, "MigrationsGlobalDisabled", errors.New("the site administrator has disabled migrations"))
return
}
diff --git a/routers/api/v1/repo/notes.go b/routers/api/v1/repo/notes.go
index 87903d9f36..f3ceeaeacf 100644
--- a/routers/api/v1/repo/notes.go
+++ b/routers/api/v1/repo/notes.go
@@ -4,6 +4,7 @@
package repo
import (
+ "errors"
"fmt"
"net/http"
@@ -63,7 +64,7 @@ func GetNote(ctx *context.APIContext) {
func getNote(ctx *context.APIContext, identifier string) {
if ctx.Repo.GitRepo == nil {
- ctx.InternalServerError(fmt.Errorf("no open git repo"))
+ ctx.InternalServerError(errors.New("no open git repo"))
return
}
diff --git a/routers/api/v1/repo/pull.go b/routers/api/v1/repo/pull.go
index 25c85e7531..e7ff533d6a 100644
--- a/routers/api/v1/repo/pull.go
+++ b/routers/api/v1/repo/pull.go
@@ -47,7 +47,7 @@ import (
func ListPullRequests(ctx *context.APIContext) {
// swagger:operation GET /repos/{owner}/{repo}/pulls repository repoListPullRequests
// ---
- // summary: List a repo's pull requests
+ // summary: List a repo's pull requests. If a pull request is selected but fails to be retrieved for any reason, it will be a null value in the list of results.
// produces:
// - application/json
// parameters:
@@ -71,7 +71,7 @@ func ListPullRequests(ctx *context.APIContext) {
// in: query
// description: Type of sort
// type: string
- // enum: [oldest, recentupdate, leastupdate, mostcomment, leastcomment, priority]
+ // enum: [oldest, recentupdate, recentclose, leastupdate, mostcomment, leastcomment, priority]
// - name: milestone
// in: query
// description: ID of the milestone
@@ -1050,11 +1050,11 @@ func MergePullRequest(ctx *context.APIContext) {
if err := repo_service.DeleteBranchAfterMerge(ctx, ctx.Doer, pr, headRepo); err != nil {
switch {
case errors.Is(err, repo_service.ErrBranchIsDefault):
- ctx.Error(http.StatusForbidden, "DefaultBranch", fmt.Errorf("the head branch is the default branch"))
+ ctx.Error(http.StatusForbidden, "DefaultBranch", errors.New("the head branch is the default branch"))
case errors.Is(err, git_model.ErrBranchIsProtected):
- ctx.Error(http.StatusForbidden, "IsProtectedBranch", fmt.Errorf("the head branch is protected"))
+ ctx.Error(http.StatusForbidden, "IsProtectedBranch", errors.New("the head branch is protected"))
case errors.Is(err, util.ErrPermissionDenied):
- ctx.Error(http.StatusForbidden, "HeadBranch", fmt.Errorf("insufficient permission to delete head branch"))
+ ctx.Error(http.StatusForbidden, "HeadBranch", errors.New("insufficient permission to delete head branch"))
default:
ctx.Error(http.StatusInternalServerError, "DeleteBranchAfterMerge", err)
}
@@ -1084,7 +1084,6 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
err error
)
- // If there is no head repository, it means pull request between same repository.
headInfos := strings.Split(form.Head, ":")
if len(headInfos) == 1 {
isSameRepo = true
@@ -1094,7 +1093,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
headUser, err = user_model.GetUserByName(ctx, headInfos[0])
if err != nil {
if user_model.IsErrUserNotExist(err) {
- ctx.NotFound("GetUserByName")
+ ctx.NotFound(fmt.Errorf("the owner %s does not exist", headInfos[0]))
} else {
ctx.Error(http.StatusInternalServerError, "GetUserByName", err)
}
@@ -1104,7 +1103,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
// The head repository can also point to the same repo
isSameRepo = ctx.Repo.Owner.ID == headUser.ID
} else {
- ctx.NotFound()
+ ctx.NotFound(fmt.Errorf("the head part of {basehead} %s must contain zero or one colon (:) but contains %d", form.Head, len(headInfos)-1))
return nil, nil, nil, "", ""
}
@@ -1116,16 +1115,10 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
baseIsBranch := ctx.Repo.GitRepo.IsBranchExist(baseBranch)
baseIsTag := ctx.Repo.GitRepo.IsTagExist(baseBranch)
if !baseIsCommit && !baseIsBranch && !baseIsTag {
- // Check for short SHA usage
- if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(baseBranch); baseCommit != nil {
- baseBranch = baseCommit.ID.String()
- } else {
- ctx.NotFound("BaseNotExist")
- return nil, nil, nil, "", ""
- }
+ ctx.NotFound(fmt.Errorf("could not find '%s' to be a commit, branch or tag in the base repository %s/%s", baseBranch, baseRepo.Owner.Name, baseRepo.Name))
+ return nil, nil, nil, "", ""
}
- // Check if current user has fork of repository or in the same repository.
headRepo := repo_model.GetForkedRepo(ctx, headUser.ID, baseRepo.ID)
if headRepo == nil && !isSameRepo {
err := baseRepo.GetBaseRepo(ctx)
@@ -1134,13 +1127,11 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
return nil, nil, nil, "", ""
}
- // Check if baseRepo's base repository is the same as headUser's repository.
if baseRepo.BaseRepo == nil || baseRepo.BaseRepo.OwnerID != headUser.ID {
log.Trace("parseCompareInfo[%d]: does not have fork or in same repository", baseRepo.ID)
- ctx.NotFound("GetBaseRepo")
+ ctx.NotFound(fmt.Errorf("%[1]s does not have a fork of %[2]s/%[3]s and %[2]s/%[3]s is not a fork of a repository from %[1]s", headUser.Name, baseRepo.Owner.Name, baseRepo.Name))
return nil, nil, nil, "", ""
}
- // Assign headRepo so it can be used below.
headRepo = baseRepo.BaseRepo
}
@@ -1194,32 +1185,27 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
return nil, nil, nil, "", ""
}
- // Check if head branch is valid.
- headIsCommit := headGitRepo.IsBranchExist(headBranch)
- headIsBranch := headGitRepo.IsTagExist(headBranch)
- headIsTag := headGitRepo.IsCommitExist(baseBranch)
- if !headIsCommit && !headIsBranch && !headIsTag {
- // Check if headBranch is short sha commit hash
- if headCommit, _ := headGitRepo.GetCommit(headBranch); headCommit != nil {
- headBranch = headCommit.ID.String()
- } else {
- headGitRepo.Close()
- ctx.NotFound("IsRefExist", nil)
- return nil, nil, nil, "", ""
- }
- }
-
baseBranchRef := baseBranch
if baseIsBranch {
baseBranchRef = git.BranchPrefix + baseBranch
} else if baseIsTag {
baseBranchRef = git.TagPrefix + baseBranch
}
+
+ // Check if head branch is valid.
+ headIsCommit := headGitRepo.IsCommitExist(headBranch)
+ headIsBranch := headGitRepo.IsBranchExist(headBranch)
+ headIsTag := headGitRepo.IsTagExist(headBranch)
+ if !headIsCommit && !headIsBranch && !headIsTag {
+ ctx.NotFound(fmt.Errorf("could not find '%s' to be a commit, branch or tag in the head repository %s/%s", headBranch, headRepo.Owner.Name, headRepo.Name))
+ return nil, nil, nil, "", ""
+ }
+
headBranchRef := headBranch
if headIsBranch {
- headBranchRef = headBranch
+ headBranchRef = git.BranchPrefix + headBranch
} else if headIsTag {
- headBranchRef = headBranch
+ headBranchRef = git.TagPrefix + headBranch
}
compareInfo, err := headGitRepo.GetCompareInfo(repo_model.RepoPath(baseRepo.Owner.Name, baseRepo.Name), baseBranchRef, headBranchRef, false, false)
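The reordered block above first classifies the head name, then qualifies it into a full ref. A standalone sketch of that qualification step (the constant values match git's standard namespaces behind git.BranchPrefix and git.TagPrefix):

package main

import "fmt"

const (
	branchPrefix = "refs/heads/"
	tagPrefix    = "refs/tags/"
)

// qualifyRef expands a branch or tag name into its fully qualified ref,
// while a bare commit ID is passed through unchanged.
func qualifyRef(name string, isBranch, isTag bool) string {
	switch {
	case isBranch:
		return branchPrefix + name
	case isTag:
		return tagPrefix + name
	default:
		return name // commit ID
	}
}

func main() {
	fmt.Println(qualifyRef("main", true, false))     // refs/heads/main
	fmt.Println(qualifyRef("v1.0", false, true))     // refs/tags/v1.0
	fmt.Println(qualifyRef("0123abc", false, false)) // 0123abc
}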
@@ -1628,7 +1614,7 @@ func GetPullRequestFiles(ctx *context.APIContext) {
maxLines := setting.Git.MaxGitDiffLines
// FIXME: If there are too many files in the repo, may cause some unpredictable issues.
- diff, err := gitdiff.GetDiff(ctx, baseGitRepo,
+ diff, _, err := gitdiff.GetDiffSimple(ctx, baseGitRepo,
&gitdiff.DiffOptions{
BeforeCommitID: startCommitID,
AfterCommitID: endCommitID,
diff --git a/routers/api/v1/repo/pull_review.go b/routers/api/v1/repo/pull_review.go
index aa4c7318a2..830a62bf54 100644
--- a/routers/api/v1/repo/pull_review.go
+++ b/routers/api/v1/repo/pull_review.go
@@ -4,6 +4,7 @@
package repo
import (
+ "errors"
"fmt"
"net/http"
"strings"
@@ -581,7 +582,7 @@ func SubmitPullReview(ctx *context.APIContext) {
}
if review.Type != issues_model.ReviewTypePending {
- ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("only a pending review can be submitted"))
+ ctx.Error(http.StatusUnprocessableEntity, "", errors.New("only a pending review can be submitted"))
return
}
@@ -593,7 +594,7 @@ func SubmitPullReview(ctx *context.APIContext) {
// if review stay pending return
if reviewType == issues_model.ReviewTypePending {
- ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("review stay pending"))
+ ctx.Error(http.StatusUnprocessableEntity, "", errors.New("review stay pending"))
return
}
@@ -634,7 +635,7 @@ func preparePullReviewType(ctx *context.APIContext, pr *issues_model.PullRequest
case api.ReviewStateApproved:
// can not approve your own PR
if pr.Issue.IsPoster(ctx.Doer.ID) {
- ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("approve your own pull is not allowed"))
+ ctx.Error(http.StatusUnprocessableEntity, "", errors.New("approve your own pull is not allowed"))
return -1, true
}
reviewType = issues_model.ReviewTypeApprove
@@ -643,7 +644,7 @@ func preparePullReviewType(ctx *context.APIContext, pr *issues_model.PullRequest
case api.ReviewStateRequestChanges:
// can not reject your own PR
if pr.Issue.IsPoster(ctx.Doer.ID) {
- ctx.Error(http.StatusUnprocessableEntity, "", fmt.Errorf("reject your own pull is not allowed"))
+ ctx.Error(http.StatusUnprocessableEntity, "", errors.New("reject your own pull is not allowed"))
return -1, true
}
reviewType = issues_model.ReviewTypeReject
diff --git a/routers/api/v1/repo/release.go b/routers/api/v1/repo/release.go
index 68254a530a..0bf958b523 100644
--- a/routers/api/v1/repo/release.go
+++ b/routers/api/v1/repo/release.go
@@ -4,6 +4,7 @@
package repo
import (
+ "errors"
"fmt"
"net/http"
@@ -226,7 +227,7 @@ func CreateRelease(ctx *context.APIContext) {
form := web.GetForm(ctx).(*api.CreateReleaseOption)
if ctx.Repo.Repository.IsEmpty {
- ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", fmt.Errorf("repo is empty"))
+ ctx.Error(http.StatusUnprocessableEntity, "RepoIsEmpty", errors.New("repo is empty"))
return
}
rel, err := repo_model.GetRelease(ctx, ctx.Repo.Repository.ID, form.TagName)
@@ -267,7 +268,7 @@ func CreateRelease(ctx *context.APIContext) {
}
} else {
if !rel.IsTag {
- ctx.Error(http.StatusConflict, "GetRelease", "Release is has no Tag")
+ ctx.Error(http.StatusConflict, "GetRelease", "Release has no Tag")
return
}
diff --git a/routers/api/v1/repo/repo.go b/routers/api/v1/repo/repo.go
index e8c3965ff3..3d6a40e9ab 100644
--- a/routers/api/v1/repo/repo.go
+++ b/routers/api/v1/repo/repo.go
@@ -5,13 +5,13 @@
package repo
import (
+ "errors"
"fmt"
"net/http"
"slices"
"strings"
"time"
- actions_model "forgejo.org/models/actions"
activities_model "forgejo.org/models/activities"
"forgejo.org/models/db"
"forgejo.org/models/organization"
@@ -724,7 +724,7 @@ func updateBasicProperties(ctx *context.APIContext, opts api.EditRepoOption) err
visibilityChanged = repo.IsPrivate != *opts.Private
// when ForcePrivate enabled, you could change public repo to private, but only admin users can change private to public
if visibilityChanged && setting.Repository.ForcePrivate && !*opts.Private && !ctx.Doer.IsAdmin {
- err := fmt.Errorf("cannot change private repository to public")
+ err := errors.New("cannot change private repository to public")
ctx.Error(http.StatusUnprocessableEntity, "Force Private enabled", err)
return err
}
@@ -792,12 +792,12 @@ func updateRepoUnits(ctx *context.APIContext, owner string, repo *repo_model.Rep
if newHasIssues && opts.ExternalTracker != nil && !unit_model.TypeExternalTracker.UnitGlobalDisabled() {
// Check that values are valid
if !validation.IsValidExternalURL(opts.ExternalTracker.ExternalTrackerURL) {
- err := fmt.Errorf("External tracker URL not valid")
+ err := errors.New("External tracker URL not valid")
ctx.Error(http.StatusUnprocessableEntity, "Invalid external tracker URL", err)
return err
}
if len(opts.ExternalTracker.ExternalTrackerFormat) != 0 && !validation.IsValidExternalTrackerURLFormat(opts.ExternalTracker.ExternalTrackerFormat) {
- err := fmt.Errorf("External tracker URL format not valid")
+ err := errors.New("External tracker URL format not valid")
ctx.Error(http.StatusUnprocessableEntity, "Invalid external tracker URL format", err)
return err
}
@@ -868,7 +868,7 @@ func updateRepoUnits(ctx *context.APIContext, owner string, repo *repo_model.Rep
if newHasWiki && opts.ExternalWiki != nil && !unit_model.TypeExternalWiki.UnitGlobalDisabled() {
// Check that values are valid
if !validation.IsValidExternalURL(opts.ExternalWiki.ExternalWikiURL) {
- err := fmt.Errorf("External wiki URL not valid")
+ err := errors.New("External wiki URL not valid")
ctx.Error(http.StatusUnprocessableEntity, "", "Invalid external wiki URL")
return err
}
@@ -1052,7 +1052,7 @@ func updateRepoArchivedState(ctx *context.APIContext, opts api.EditRepoOption) e
// archive / un-archive
if opts.Archived != nil {
if repo.IsMirror {
- err := fmt.Errorf("repo is a mirror, cannot archive/un-archive")
+ err := errors.New("repo is a mirror, cannot archive/un-archive")
ctx.Error(http.StatusUnprocessableEntity, err.Error(), err)
return err
}
@@ -1062,7 +1062,7 @@ func updateRepoArchivedState(ctx *context.APIContext, opts api.EditRepoOption) e
ctx.Error(http.StatusInternalServerError, "ArchiveRepoState", err)
return err
}
- if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ if err := actions_service.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
}
log.Trace("Repository was archived: %s/%s", ctx.Repo.Owner.Name, repo.Name)
diff --git a/routers/api/v1/repo/repo_test.go b/routers/api/v1/repo/repo_test.go
index 69eeb1cfdf..024376c146 100644
--- a/routers/api/v1/repo/repo_test.go
+++ b/routers/api/v1/repo/repo_test.go
@@ -58,7 +58,7 @@ func TestRepoEdit(t *testing.T) {
web.SetForm(ctx, &opts)
Edit(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{
ID: 1,
}, unittest.Cond("name = ? AND is_archived = 1", *opts.Name))
@@ -78,7 +78,7 @@ func TestRepoEditNameChange(t *testing.T) {
web.SetForm(ctx, &opts)
Edit(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{
ID: 1,
diff --git a/routers/api/v1/repo/sync_fork.go b/routers/api/v1/repo/sync_fork.go
new file mode 100644
index 0000000000..c3a9bd26ba
--- /dev/null
+++ b/routers/api/v1/repo/sync_fork.go
@@ -0,0 +1,185 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repo
+
+import (
+ "net/http"
+
+ git_model "forgejo.org/models/git"
+ "forgejo.org/services/context"
+ repo_service "forgejo.org/services/repository"
+)
+
+func getSyncForkInfo(ctx *context.APIContext, branch string) {
+ if !ctx.Repo.Repository.IsFork {
+ ctx.Error(http.StatusBadRequest, "NoFork", "The Repo must be a fork")
+ return
+ }
+
+ syncForkInfo, err := repo_service.GetSyncForkInfo(ctx, ctx.Repo.Repository, branch)
+ if err != nil {
+ if git_model.IsErrBranchNotExist(err) {
+ ctx.NotFound(err, branch)
+ return
+ }
+
+ ctx.Error(http.StatusInternalServerError, "GetSyncForkInfo", err)
+ return
+ }
+
+ ctx.JSON(http.StatusOK, syncForkInfo)
+}
+
+// SyncForkDefaultInfo returns information about syncing the fork's default branch with the base branch
+func SyncForkDefaultInfo(ctx *context.APIContext) {
+ // swagger:operation GET /repos/{owner}/{repo}/sync_fork repository repoSyncForkDefaultInfo
+ // ---
+ // summary: Gets information about syncing the fork default branch with the base branch
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // responses:
+ // "200":
+ // "$ref": "#/responses/SyncForkInfo"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ getSyncForkInfo(ctx, ctx.Repo.Repository.DefaultBranch)
+}
+
+// SyncForkBranchInfo returns information about syncing a fork branch with the base branch
+func SyncForkBranchInfo(ctx *context.APIContext) {
+ // swagger:operation GET /repos/{owner}/{repo}/sync_fork/{branch} repository repoSyncForkBranchInfo
+ // ---
+ // summary: Gets information about syncing a fork branch with the base branch
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: branch
+ // in: path
+ // description: The branch
+ // type: string
+ // required: true
+ // responses:
+ // "200":
+ // "$ref": "#/responses/SyncForkInfo"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ getSyncForkInfo(ctx, ctx.Params("branch"))
+}
+
+func syncForkBranch(ctx *context.APIContext, branch string) {
+ if !ctx.Repo.Repository.IsFork {
+ ctx.Error(http.StatusBadRequest, "NoFork", "The Repo must be a fork")
+ return
+ }
+
+ syncForkInfo, err := repo_service.GetSyncForkInfo(ctx, ctx.Repo.Repository, branch)
+ if err != nil {
+ if git_model.IsErrBranchNotExist(err) {
+ ctx.NotFound(err, branch)
+ return
+ }
+
+ ctx.Error(http.StatusInternalServerError, "GetSyncForkInfo", err)
+ return
+ }
+
+ if !syncForkInfo.Allowed {
+ ctx.Error(http.StatusBadRequest, "NotAllowed", "You can't sync this branch")
+ return
+ }
+
+ err = repo_service.SyncFork(ctx, ctx.Doer, ctx.Repo.Repository, branch)
+ if err != nil {
+ ctx.Error(http.StatusInternalServerError, "SyncFork", err)
+ return
+ }
+
+ ctx.Status(http.StatusNoContent)
+}
+
+// SyncForkDefault syncs the default branch of a fork with the base branch
+func SyncForkDefault(ctx *context.APIContext) {
+ // swagger:operation POST /repos/{owner}/{repo}/sync_fork repository repoSyncForkDefault
+ // ---
+ // summary: Syncs the default branch of a fork with the base branch
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // responses:
+ // "204":
+ // "$ref": "#/responses/empty"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ syncForkBranch(ctx, ctx.Repo.Repository.DefaultBranch)
+}
+
+// SyncForkBranch syncs a fork branch with the base branch
+func SyncForkBranch(ctx *context.APIContext) {
+ // swagger:operation POST /repos/{owner}/{repo}/sync_fork/{branch} repository repoSyncForkBranch
+ // ---
+ // summary: Syncs a fork branch with the base branch
+ // produces:
+ // - application/json
+ // parameters:
+ // - name: owner
+ // in: path
+ // description: owner of the repo
+ // type: string
+ // required: true
+ // - name: repo
+ // in: path
+ // description: name of the repo
+ // type: string
+ // required: true
+ // - name: branch
+ // in: path
+ // description: The branch
+ // type: string
+ // required: true
+ // responses:
+ // "204":
+ // "$ref": "#/responses/empty"
+ // "400":
+ // "$ref": "#/responses/error"
+ // "404":
+ // "$ref": "#/responses/notFound"
+ syncForkBranch(ctx, ctx.Params("branch"))
+}
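A hypothetical client for the two new endpoints (host, repository and token are placeholders): query GET .../sync_fork for the SyncForkInfo of the default branch, then POST to trigger the sync, expecting 204 on success and 400 when syncing is not allowed.

package main

import (
	"fmt"
	"net/http"
)

func main() {
	base := "https://forgejo.example.com/api/v1/repos/alice/fork"
	token := "token <personal-access-token>" // placeholder

	// GET .../sync_fork returns the SyncForkInfo for the default branch.
	req, _ := http.NewRequest(http.MethodGet, base+"/sync_fork", nil)
	req.Header.Set("Authorization", token)
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Println("info status:", resp.StatusCode) // 200, 400 (not a fork) or 404

	// POST .../sync_fork performs the sync.
	req, _ = http.NewRequest(http.MethodPost, base+"/sync_fork", nil)
	req.Header.Set("Authorization", token)
	resp, err = http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	resp.Body.Close()
	fmt.Println("sync status:", resp.StatusCode) // 204, 400 or 404
}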
diff --git a/routers/api/v1/repo/transfer.go b/routers/api/v1/repo/transfer.go
index 3b6cb4d3f2..72cfeaf902 100644
--- a/routers/api/v1/repo/transfer.go
+++ b/routers/api/v1/repo/transfer.go
@@ -238,7 +238,7 @@ func acceptOrRejectRepoTransfer(ctx *context.APIContext, accept bool) error {
if !repoTransfer.CanUserAcceptTransfer(ctx, ctx.Doer) {
ctx.Error(http.StatusForbidden, "CanUserAcceptTransfer", nil)
- return fmt.Errorf("user does not have permissions to do this")
+ return errors.New("user does not have permissions to do this")
}
if accept {
diff --git a/routers/api/v1/repo/wiki.go b/routers/api/v1/repo/wiki.go
index bb4cf0f211..7b6a00408a 100644
--- a/routers/api/v1/repo/wiki.go
+++ b/routers/api/v1/repo/wiki.go
@@ -5,6 +5,7 @@ package repo
import (
"encoding/base64"
+ "errors"
"fmt"
"net/http"
"net/url"
@@ -506,11 +507,8 @@ func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit)
// given tree entry, encoded with base64. Writes to ctx if an error occurs.
func wikiContentsByEntry(ctx *context.APIContext, entry *git.TreeEntry) string {
blob := entry.Blob()
- if blob.Size() > setting.API.DefaultMaxBlobSize {
- return ""
- }
- content, err := blob.GetBlobContentBase64()
- if err != nil {
+ content, err := blob.GetContentBase64(setting.API.DefaultMaxBlobSize)
+ if err != nil && !errors.As(err, &git.BlobTooLargeError{}) {
ctx.Error(http.StatusInternalServerError, "GetBlobContentBase64", err)
return ""
}
diff --git a/routers/api/v1/swagger/repo.go b/routers/api/v1/swagger/repo.go
index 445e3417fb..cd4832e15f 100644
--- a/routers/api/v1/swagger/repo.go
+++ b/routers/api/v1/swagger/repo.go
@@ -231,11 +231,18 @@ type swaggerGitTreeResponse struct {
Body api.GitTreeResponse `json:"body"`
}
-// GitBlobResponse
-// swagger:response GitBlobResponse
-type swaggerGitBlobResponse struct {
+// GitBlob
+// swagger:response GitBlob
+type swaggerGitBlob struct {
// in: body
- Body api.GitBlobResponse `json:"body"`
+ Body api.GitBlob `json:"body"`
+}
+
+// GitBlobList
+// swagger:response GitBlobList
+type swaggerGitBlobList struct {
+ // in: body
+ Body []api.GitBlob `json:"body"`
}
// Commit
@@ -448,3 +455,24 @@ type swaggerCompare struct {
// in:body
Body api.Compare `json:"body"`
}
+
+// SyncForkInfo
+// swagger:response SyncForkInfo
+type swaggerSyncForkInfo struct {
+ // in:body
+ Body []api.SyncForkInfo `json:"body"`
+}
+
+// ActionRunList
+// swagger:response ActionRunList
+type swaggerActionRunList struct {
+ // in:body
+ Body api.ListActionRunResponse `json:"body"`
+}
+
+// ActionRun
+// swagger:response ActionRun
+type swaggerActionRun struct {
+ // in:body
+ Body api.ActionRun `json:"body"`
+}
diff --git a/routers/api/v1/user/email.go b/routers/api/v1/user/email.go
index 7b18ea97b0..03d8d14b90 100644
--- a/routers/api/v1/user/email.go
+++ b/routers/api/v1/user/email.go
@@ -75,14 +75,11 @@ func AddEmail(ctx *context.APIContext) {
if err := user_service.AddEmailAddresses(ctx, ctx.Doer, form.Emails); err != nil {
if user_model.IsErrEmailAlreadyUsed(err) {
ctx.Error(http.StatusUnprocessableEntity, "", "Email address has been used: "+err.(user_model.ErrEmailAlreadyUsed).Email)
- } else if validation.IsErrEmailCharIsNotSupported(err) || validation.IsErrEmailInvalid(err) {
+ } else if validation.IsErrEmailInvalid(err) {
email := ""
if typedError, ok := err.(validation.ErrEmailInvalid); ok {
email = typedError.Email
}
- if typedError, ok := err.(validation.ErrEmailCharIsNotSupported); ok {
- email = typedError.Email
- }
errMsg := fmt.Sprintf("Email address %q invalid", email)
ctx.Error(http.StatusUnprocessableEntity, "", errMsg)
diff --git a/routers/api/v1/user/gpg_key.go b/routers/api/v1/user/gpg_key.go
index 1581358b66..886e33b205 100644
--- a/routers/api/v1/user/gpg_key.go
+++ b/routers/api/v1/user/gpg_key.go
@@ -4,6 +4,7 @@
package user
import (
+ "errors"
"fmt"
"net/http"
"strings"
@@ -143,7 +144,7 @@ func GetGPGKey(ctx *context.APIContext) {
// CreateUserGPGKey creates new GPG key to given user by ID.
func CreateUserGPGKey(ctx *context.APIContext, form api.CreateGPGKeyOption, uid int64) {
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("gpg keys setting is not allowed to be visited"))
return
}
@@ -298,7 +299,7 @@ func DeleteGPGKey(ctx *context.APIContext) {
// "$ref": "#/responses/notFound"
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("gpg keys setting is not allowed to be visited"))
return
}
diff --git a/routers/api/v1/user/key.go b/routers/api/v1/user/key.go
index 3aecf5fc3a..d8b5dfdfe9 100644
--- a/routers/api/v1/user/key.go
+++ b/routers/api/v1/user/key.go
@@ -5,7 +5,7 @@ package user
import (
std_ctx "context"
- "fmt"
+ "errors"
"net/http"
asymkey_model "forgejo.org/models/asymkey"
@@ -24,9 +24,10 @@ import (
// appendPrivateInformation appends the owner and key type information to api.PublicKey
func appendPrivateInformation(ctx std_ctx.Context, apiKey *api.PublicKey, key *asymkey_model.PublicKey, defaultUser *user_model.User) (*api.PublicKey, error) {
- if key.Type == asymkey_model.KeyTypeDeploy {
+ switch key.Type {
+ case asymkey_model.KeyTypeDeploy:
apiKey.KeyType = "deploy"
- } else if key.Type == asymkey_model.KeyTypeUser {
+ case asymkey_model.KeyTypeUser:
apiKey.KeyType = "user"
if defaultUser.ID == key.OwnerID {
@@ -38,7 +39,7 @@ func appendPrivateInformation(ctx std_ctx.Context, apiKey *api.PublicKey, key *a
}
apiKey.Owner = convert.ToUser(ctx, user, user)
}
- } else {
+ default:
apiKey.KeyType = "unknown"
}
apiKey.ReadOnly = key.Mode == perm.AccessModeRead
@@ -208,7 +209,7 @@ func GetPublicKey(ctx *context.APIContext) {
// CreateUserPublicKey creates new public key to given user by ID.
func CreateUserPublicKey(ctx *context.APIContext, form api.CreateKeyOption, uid int64) {
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("ssh keys setting is not allowed to be visited"))
return
}
@@ -284,7 +285,7 @@ func DeletePublicKey(ctx *context.APIContext) {
// "$ref": "#/responses/notFound"
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("ssh keys setting is not allowed to be visited"))
return
}
diff --git a/routers/api/v1/user/repo.go b/routers/api/v1/user/repo.go
index 7b326812a7..94dd3931e4 100644
--- a/routers/api/v1/user/repo.go
+++ b/routers/api/v1/user/repo.go
@@ -155,7 +155,7 @@ func ListMyRepos(ctx *context.APIContext) {
results[i] = convert.ToRepo(ctx, repo, permission)
}
- ctx.SetLinkHeader(int(count), opts.ListOptions.PageSize)
+ ctx.SetLinkHeader(int(count), opts.PageSize)
ctx.SetTotalCountHeader(count)
ctx.JSON(http.StatusOK, &results)
}
diff --git a/routers/api/v1/utils/git.go b/routers/api/v1/utils/git.go
index 5359a54899..65a8994405 100644
--- a/routers/api/v1/utils/git.go
+++ b/routers/api/v1/utils/git.go
@@ -5,6 +5,7 @@ package utils
import (
gocontext "context"
+ "errors"
"fmt"
"net/http"
@@ -50,7 +51,7 @@ func ResolveRefOrSha(ctx *context.APIContext, ref string) string {
// GetGitRefs return git references based on filter
func GetGitRefs(ctx *context.APIContext, filter string) ([]*git.Reference, string, error) {
if ctx.Repo.GitRepo == nil {
- return nil, "", fmt.Errorf("no open git repo found in context")
+ return nil, "", errors.New("no open git repo found in context")
}
if len(filter) > 0 {
filter = "refs/" + filter
diff --git a/routers/api/v1/utils/hook.go b/routers/api/v1/utils/hook.go
index d882845008..fc4b3293ac 100644
--- a/routers/api/v1/utils/hook.go
+++ b/routers/api/v1/utils/hook.go
@@ -16,6 +16,7 @@ import (
"forgejo.org/modules/setting"
api "forgejo.org/modules/structs"
"forgejo.org/modules/util"
+ "forgejo.org/modules/validation"
webhook_module "forgejo.org/modules/webhook"
"forgejo.org/services/context"
webhook_service "forgejo.org/services/webhook"
@@ -93,6 +94,10 @@ func checkCreateHookOption(ctx *context.APIContext, form *api.CreateHookOption)
ctx.Error(http.StatusUnprocessableEntity, "", "Invalid content type")
return false
}
+ if !validation.IsValidURL(form.Config["url"]) {
+ ctx.Error(http.StatusUnprocessableEntity, "", "Invalid url")
+ return false
+ }
return true
}
@@ -322,6 +327,10 @@ func EditRepoHook(ctx *context.APIContext, form *api.EditHookOption, hookID int6
func editHook(ctx *context.APIContext, form *api.EditHookOption, w *webhook.Webhook) bool {
if form.Config != nil {
if url, ok := form.Config["url"]; ok {
+ if !validation.IsValidURL(url) {
+ ctx.Error(http.StatusUnprocessableEntity, "", "Invalid url")
+ return false
+ }
w.URL = url
}
if ct, ok := form.Config["content_type"]; ok {
diff --git a/routers/api/v1/utils/hook_test.go b/routers/api/v1/utils/hook_test.go
new file mode 100644
index 0000000000..3d0e6db079
--- /dev/null
+++ b/routers/api/v1/utils/hook_test.go
@@ -0,0 +1,86 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package utils
+
+import (
+ "net/http"
+ "testing"
+
+ "forgejo.org/models/unittest"
+ "forgejo.org/modules/structs"
+ "forgejo.org/services/contexttest"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestTestHookValidation(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+
+ t.Run("Test Validation", func(t *testing.T) {
+ ctx, _ := contexttest.MockAPIContext(t, "user2/repo1/hooks")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ contexttest.LoadRepoCommit(t, ctx)
+ contexttest.LoadUser(t, ctx, 2)
+
+ checkCreateHookOption(ctx, &structs.CreateHookOption{
+ Type: "gitea",
+ Config: map[string]string{
+ "content_type": "json",
+ "url": "https://example.com/webhook",
+ },
+ })
+ assert.Equal(t, 0, ctx.Resp.WrittenStatus()) // not written yet
+ })
+
+ t.Run("Test Validation with invalid URL", func(t *testing.T) {
+ ctx, _ := contexttest.MockAPIContext(t, "user2/repo1/hooks")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ contexttest.LoadRepoCommit(t, ctx)
+ contexttest.LoadUser(t, ctx, 2)
+
+ checkCreateHookOption(ctx, &structs.CreateHookOption{
+ Type: "gitea",
+ Config: map[string]string{
+ "content_type": "json",
+ "url": "example.com/webhook",
+ },
+ })
+ assert.Equal(t, http.StatusUnprocessableEntity, ctx.Resp.WrittenStatus())
+ })
+
+ t.Run("Test Validation with invalid webhook type", func(t *testing.T) {
+ ctx, _ := contexttest.MockAPIContext(t, "user2/repo1/hooks")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ contexttest.LoadRepoCommit(t, ctx)
+ contexttest.LoadUser(t, ctx, 2)
+
+ checkCreateHookOption(ctx, &structs.CreateHookOption{
+ Type: "unknown",
+ Config: map[string]string{
+ "content_type": "json",
+ "url": "example.com/webhook",
+ },
+ })
+ assert.Equal(t, http.StatusUnprocessableEntity, ctx.Resp.WrittenStatus())
+ })
+
+ t.Run("Test Validation with empty content type", func(t *testing.T) {
+ ctx, _ := contexttest.MockAPIContext(t, "user2/repo1/hooks")
+ contexttest.LoadRepo(t, ctx, 1)
+ contexttest.LoadGitRepo(t, ctx)
+ contexttest.LoadRepoCommit(t, ctx)
+ contexttest.LoadUser(t, ctx, 2)
+
+ checkCreateHookOption(ctx, &structs.CreateHookOption{
+ Type: "unknown",
+ Config: map[string]string{
+ "url": "https://example.com/webhook",
+ },
+ })
+ assert.Equal(t, http.StatusUnprocessableEntity, ctx.Resp.WrittenStatus())
+ })
+}
diff --git a/routers/api/v1/utils/main_test.go b/routers/api/v1/utils/main_test.go
new file mode 100644
index 0000000000..f243572436
--- /dev/null
+++ b/routers/api/v1/utils/main_test.go
@@ -0,0 +1,21 @@
+// Copyright 2018 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package utils
+
+import (
+ "testing"
+
+ "forgejo.org/models/unittest"
+ "forgejo.org/modules/setting"
+ webhook_service "forgejo.org/services/webhook"
+)
+
+func TestMain(m *testing.M) {
+ unittest.MainTest(m, &unittest.TestOptions{
+ SetUp: func() error {
+ setting.LoadQueueSettings()
+ return webhook_service.Init()
+ },
+ })
+}
diff --git a/routers/common/db.go b/routers/common/db.go
index 0646071264..ec31ced1bf 100644
--- a/routers/common/db.go
+++ b/routers/common/db.go
@@ -5,7 +5,7 @@ package common
import (
"context"
- "fmt"
+ "errors"
"time"
"forgejo.org/models/db"
@@ -24,11 +24,11 @@ func InitDBEngine(ctx context.Context) (err error) {
for i := 0; i < setting.Database.DBConnectRetries; i++ {
select {
case <-ctx.Done():
- return fmt.Errorf("Aborted due to shutdown:\nin retry ORM engine initialization")
+ return errors.New("Aborted due to shutdown:\nin retry ORM engine initialization")
default:
}
log.Info("ORM engine initialization attempt #%d/%d...", i+1, setting.Database.DBConnectRetries)
- if err = db.InitEngineWithMigration(ctx, migrateWithSetting); err == nil {
+ if err = db.InitEngineWithMigration(ctx, func(eng db.Engine) error { return migrateWithSetting(eng.(*xorm.Engine)) }); err == nil {
break
} else if i == setting.Database.DBConnectRetries-1 {
return err
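The call site above adapts a migration function written against the concrete *xorm.Engine to the interface-typed callback that InitEngineWithMigration now accepts. The same adapter pattern in miniature, with stand-in types:

package main

import "fmt"

type engine interface{ Name() string }

type concreteEngine struct{}

func (concreteEngine) Name() string { return "xorm" }

// migrate only knows how to work with the concrete engine type.
func migrate(e *concreteEngine) error {
	fmt.Println("migrating with", e.Name())
	return nil
}

// initWithMigration accepts the interface-typed callback.
func initWithMigration(run func(engine) error) error {
	return run(&concreteEngine{})
}

func main() {
	// Wrap the concrete-typed function in a closure that type-asserts,
	// mirroring the closure passed in routers/common/db.go above.
	if err := initWithMigration(func(e engine) error { return migrate(e.(*concreteEngine)) }); err != nil {
		panic(err)
	}
}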
diff --git a/routers/install/install.go b/routers/install/install.go
index b9333a9e16..f64f395a7f 100644
--- a/routers/install/install.go
+++ b/routers/install/install.go
@@ -361,7 +361,8 @@ func SubmitInstall(ctx *context.Context) {
}
// Init the engine with migration
- if err = db.InitEngineWithMigration(ctx, migrations.Migrate); err != nil {
+ // Wrap migrations.Migrate into a function of type func(db.Engine) error to fix diagnostics.
+ if err = db.InitEngineWithMigration(ctx, migrations.WrapperMigrate); err != nil {
db.UnsetDefaultEngine()
ctx.Data["Err_DbSetting"] = true
ctx.RenderWithErr(ctx.Tr("install.invalid_db_setting", err), tplInstall, &form)
@@ -407,11 +408,7 @@ func SubmitInstall(ctx *context.Context) {
if form.LFSRootPath != "" {
cfg.Section("server").Key("LFS_START_SERVER").SetValue("true")
cfg.Section("lfs").Key("PATH").SetValue(form.LFSRootPath)
- var lfsJwtSecret string
- if _, lfsJwtSecret, err = generate.NewJwtSecret(); err != nil {
- ctx.RenderWithErr(ctx.Tr("install.lfs_jwt_secret_failed", err), tplInstall, &form)
- return
- }
+ _, lfsJwtSecret := generate.NewJwtSecret()
cfg.Section("server").Key("LFS_JWT_SECRET").SetValue(lfsJwtSecret)
} else {
cfg.Section("server").Key("LFS_START_SERVER").SetValue("false")
@@ -482,11 +479,7 @@ func SubmitInstall(ctx *context.Context) {
// FIXME: at the moment, no matter oauth2 is enabled or not, it must generate a "oauth2 JWT_SECRET"
// see the "loadOAuth2From" in "setting/oauth2.go"
if !cfg.Section("oauth2").HasKey("JWT_SECRET") && !cfg.Section("oauth2").HasKey("JWT_SECRET_URI") {
- _, jwtSecretBase64, err := generate.NewJwtSecret()
- if err != nil {
- ctx.RenderWithErr(ctx.Tr("install.secret_key_failed", err), tplInstall, &form)
- return
- }
+ _, jwtSecretBase64 := generate.NewJwtSecret()
cfg.Section("oauth2").Key("JWT_SECRET").SetValue(jwtSecretBase64)
}
@@ -587,7 +580,7 @@ func SubmitInstall(ctx *context.Context) {
go func() {
// Sleep for a while to make sure the user's browser has loaded the post-install page and its assets (images, css, js)
- // What if this duration is not long enough? That's impossible -- if the user can't load the simple page in time, how could they install or use Gitea in the future ....
+ // What if this duration is not long enough? That's impossible -- if the user can't load the simple page in time, how could they install or use Forgejo in the future ....
time.Sleep(3 * time.Second)
// Now get the http.Server from this request and shut it down
diff --git a/routers/install/routes_test.go b/routers/install/routes_test.go
index a504cf1baa..9b10f05b3b 100644
--- a/routers/install/routes_test.go
+++ b/routers/install/routes_test.go
@@ -19,18 +19,18 @@ func TestRoutes(t *testing.T) {
w := httptest.NewRecorder()
req := httptest.NewRequest("GET", "/", nil)
r.ServeHTTP(w, req)
- assert.EqualValues(t, 200, w.Code)
+ assert.Equal(t, 200, w.Code)
assert.Contains(t, w.Body.String(), `class="page-content install"`)
w = httptest.NewRecorder()
req = httptest.NewRequest("GET", "/no-such", nil)
r.ServeHTTP(w, req)
- assert.EqualValues(t, 404, w.Code)
+ assert.Equal(t, 404, w.Code)
w = httptest.NewRecorder()
- req = httptest.NewRequest("GET", "/assets/img/gitea.svg", nil)
+ req = httptest.NewRequest("GET", "/assets/img/forgejo.svg", nil)
r.ServeHTTP(w, req)
- assert.EqualValues(t, 200, w.Code)
+ assert.Equal(t, 200, w.Code)
}
func TestMain(m *testing.M) {
diff --git a/routers/private/hook_post_receive.go b/routers/private/hook_post_receive.go
index c7748b01c8..a856a7a00a 100644
--- a/routers/private/hook_post_receive.go
+++ b/routers/private/hook_post_receive.go
@@ -205,7 +205,7 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
// post update for agit pull request
// FIXME: use pr.Flow to test whether it's an Agit PR or a GH PR
- if git.SupportProcReceive && refFullName.IsPull() {
+ if refFullName.IsPull() {
if repo == nil {
repo = loadRepository(ctx, ownerName, repoName)
if ctx.Written() {
diff --git a/routers/private/hook_post_receive_test.go b/routers/private/hook_post_receive_test.go
index bbd0c45769..dde4ec08f4 100644
--- a/routers/private/hook_post_receive_test.go
+++ b/routers/private/hook_post_receive_test.go
@@ -44,7 +44,7 @@ func TestHandlePullRequestMerging(t *testing.T) {
pr, err = issues_model.GetPullRequestByID(db.DefaultContext, pr.ID)
require.NoError(t, err)
assert.True(t, pr.HasMerged)
- assert.EqualValues(t, "01234567", pr.MergedCommitID)
+ assert.Equal(t, "01234567", pr.MergedCommitID)
unittest.AssertNotExistsBean(t, &pull_model.AutoMerge{ID: autoMerge.ID})
}
diff --git a/routers/private/hook_pre_receive.go b/routers/private/hook_pre_receive.go
index 191273209e..45992e8522 100644
--- a/routers/private/hook_pre_receive.go
+++ b/routers/private/hook_pre_receive.go
@@ -155,7 +155,7 @@ func (ctx *preReceiveContext) checkQuota() error {
return nil
}
- ok, err := quota_model.EvaluateForUser(ctx, ctx.PrivateContext.Repo.Repository.OwnerID, quota_model.LimitSubjectSizeReposAll)
+ ok, err := quota_model.EvaluateForUser(ctx, ctx.Repo.Repository.OwnerID, quota_model.LimitSubjectSizeReposAll)
if err != nil {
log.Error("quota_model.EvaluateForUser: %v", err)
ctx.JSON(http.StatusInternalServerError, private.Response{
@@ -205,7 +205,7 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) {
preReceiveBranch(ourCtx, oldCommitID, newCommitID, refFullName)
case refFullName.IsTag():
preReceiveTag(ourCtx, oldCommitID, newCommitID, refFullName)
- case git.SupportProcReceive && refFullName.IsFor():
+ case refFullName.IsFor():
preReceiveFor(ourCtx, oldCommitID, newCommitID, refFullName)
default:
if ourCtx.isOverQuota {
@@ -531,10 +531,7 @@ func preReceiveFor(ctx *preReceiveContext, oldCommitID, newCommitID string, refF
baseBranchName := refFullName.ForBranchName()
- baseBranchExist := false
- if ctx.Repo.GitRepo.IsBranchExist(baseBranchName) {
- baseBranchExist = true
- }
+ baseBranchExist := ctx.Repo.GitRepo.IsBranchExist(baseBranchName)
if !baseBranchExist {
for p, v := range baseBranchName {
diff --git a/routers/private/hook_proc_receive.go b/routers/private/hook_proc_receive.go
index cd45794261..9f6e23f158 100644
--- a/routers/private/hook_proc_receive.go
+++ b/routers/private/hook_proc_receive.go
@@ -7,7 +7,6 @@ import (
"net/http"
repo_model "forgejo.org/models/repo"
- "forgejo.org/modules/git"
"forgejo.org/modules/log"
"forgejo.org/modules/private"
"forgejo.org/modules/web"
@@ -18,10 +17,6 @@ import (
// HookProcReceive proc-receive hook - only handles agit Proc-Receive requests at present
func HookProcReceive(ctx *gitea_context.PrivateContext) {
opts := web.GetForm(ctx).(*private.HookOptions)
- if !git.SupportProcReceive {
- ctx.Status(http.StatusNotFound)
- return
- }
results, err := agit.ProcReceive(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, opts)
if err != nil {
diff --git a/routers/private/manager_process.go b/routers/private/manager_process.go
index 87447da2be..e60ed04879 100644
--- a/routers/private/manager_process.go
+++ b/routers/private/manager_process.go
@@ -122,7 +122,7 @@ func writeProcess(out io.Writer, process *process_module.Process, indent string,
if stack.Count > 1 {
_, _ = fmt.Fprintf(sb, "* %d", stack.Count)
}
- _, _ = fmt.Fprintf(sb, "\n")
+ _, _ = fmt.Fprintln(sb)
indent += "| "
if len(stack.Labels) > 0 {
_, _ = fmt.Fprintf(sb, "%sLabels: %q:%q", indent, stack.Labels[0].Name, stack.Labels[0].Value)
@@ -132,7 +132,7 @@ func writeProcess(out io.Writer, process *process_module.Process, indent string,
_, _ = fmt.Fprintf(sb, ", %q:%q", label.Name, label.Value)
}
}
- _, _ = fmt.Fprintf(sb, "\n")
+ _, _ = fmt.Fprintln(sb)
}
_, _ = fmt.Fprintf(sb, "%sStack:\n", indent)
indent += " "
diff --git a/routers/private/serv.go b/routers/private/serv.go
index 4c5b7bbccb..a4029e354c 100644
--- a/routers/private/serv.go
+++ b/routers/private/serv.go
@@ -14,7 +14,6 @@ import (
repo_model "forgejo.org/models/repo"
"forgejo.org/models/unit"
user_model "forgejo.org/models/user"
- "forgejo.org/modules/git"
"forgejo.org/modules/log"
"forgejo.org/modules/private"
"forgejo.org/modules/setting"
@@ -303,7 +302,7 @@ func ServCommand(ctx *context.PrivateContext) {
// the permission check to read. The pre-receive hook will do another
// permission check which ensure for non AGit flow references the write
// permission is checked.
- if git.SupportProcReceive && unitType == unit.TypeCode && ctx.FormString("verb") == "git-receive-pack" {
+ if unitType == unit.TypeCode && ctx.FormString("verb") == "git-receive-pack" {
mode = perm.AccessModeRead
}
diff --git a/routers/web/admin/admin_test.go b/routers/web/admin/admin_test.go
index d0c3c2b56f..0bad4402aa 100644
--- a/routers/web/admin/admin_test.go
+++ b/routers/web/admin/admin_test.go
@@ -69,7 +69,7 @@ func TestShadowPassword(t *testing.T) {
}
for _, k := range kases {
- assert.EqualValues(t, k.Result, shadowPassword(k.Provider, k.CfgItem))
+ assert.Equal(t, k.Result, shadowPassword(k.Provider, k.CfgItem))
}
}
diff --git a/routers/web/admin/config.go b/routers/web/admin/config.go
index f99a193960..dcc99ff1a8 100644
--- a/routers/web/admin/config.go
+++ b/routers/web/admin/config.go
@@ -1,5 +1,6 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package admin
@@ -145,6 +146,7 @@ func Config(ctx *context.Context) {
ctx.Data["Service"] = setting.Service
ctx.Data["DbCfg"] = setting.Database
ctx.Data["Webhook"] = setting.Webhook
+ ctx.Data["Moderation"] = setting.Moderation
ctx.Data["MailerEnabled"] = false
if setting.MailService != nil {
diff --git a/routers/web/admin/users.go b/routers/web/admin/users.go
index f6d214d24c..964326291e 100644
--- a/routers/web/admin/users.go
+++ b/routers/web/admin/users.go
@@ -22,7 +22,6 @@ import (
"forgejo.org/modules/log"
"forgejo.org/modules/optional"
"forgejo.org/modules/setting"
- "forgejo.org/modules/util"
"forgejo.org/modules/validation"
"forgejo.org/modules/web"
"forgejo.org/routers/web/explore"
@@ -77,11 +76,11 @@ func Users(ctx *context.Context) {
PageSize: setting.UI.Admin.UserPagingNum,
},
SearchByEmail: true,
- IsActive: util.OptionalBoolParse(statusFilterMap["is_active"]),
- IsAdmin: util.OptionalBoolParse(statusFilterMap["is_admin"]),
- IsRestricted: util.OptionalBoolParse(statusFilterMap["is_restricted"]),
- IsTwoFactorEnabled: util.OptionalBoolParse(statusFilterMap["is_2fa_enabled"]),
- IsProhibitLogin: util.OptionalBoolParse(statusFilterMap["is_prohibit_login"]),
+ IsActive: optional.ParseBool(statusFilterMap["is_active"]),
+ IsAdmin: optional.ParseBool(statusFilterMap["is_admin"]),
+ IsRestricted: optional.ParseBool(statusFilterMap["is_restricted"]),
+ IsTwoFactorEnabled: optional.ParseBool(statusFilterMap["is_2fa_enabled"]),
+ IsProhibitLogin: optional.ParseBool(statusFilterMap["is_prohibit_login"]),
IncludeReserved: true, // administrator needs to list all accounts include reserved, bot, remote ones
Load2FAStatus: true,
ExtraParamStrings: extraParamStrings,
@@ -187,7 +186,7 @@ func NewUserPost(ctx *context.Context) {
case user_model.IsErrEmailAlreadyUsed(err):
ctx.Data["Err_Email"] = true
ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplUserNew, &form)
- case validation.IsErrEmailInvalid(err), validation.IsErrEmailCharIsNotSupported(err):
+ case validation.IsErrEmailInvalid(err):
ctx.Data["Err_Email"] = true
ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserNew, &form)
case db.IsErrNameReserved(err):
@@ -196,9 +195,6 @@ func NewUserPost(ctx *context.Context) {
case db.IsErrNamePatternNotAllowed(err):
ctx.Data["Err_UserName"] = true
ctx.RenderWithErr(ctx.Tr("user.form.name_pattern_not_allowed", err.(db.ErrNamePatternNotAllowed).Pattern), tplUserNew, &form)
- case db.IsErrNameCharsNotAllowed(err):
- ctx.Data["Err_UserName"] = true
- ctx.RenderWithErr(ctx.Tr("user.form.name_chars_not_allowed", err.(db.ErrNameCharsNotAllowed).Name), tplUserNew, &form)
default:
ctx.ServerError("CreateUser", err)
}
@@ -249,17 +245,12 @@ func prepareUserInfo(ctx *context.Context) *user_model.User {
}
ctx.Data["Sources"] = sources
- hasTOTP, err := auth.HasTOTPByUID(ctx, u.ID)
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
if err != nil {
- ctx.ServerError("auth.HasTwoFactorByUID", err)
+ ctx.ServerError("HasTwoFactorByUID", err)
return nil
}
- hasWebAuthn, err := auth.HasWebAuthnRegistrationsByUID(ctx, u.ID)
- if err != nil {
- ctx.ServerError("auth.HasWebAuthnRegistrationsByUID", err)
- return nil
- }
- ctx.Data["TwoFactorEnabled"] = hasTOTP || hasWebAuthn
+ ctx.Data["TwoFactorEnabled"] = hasTwoFactor
return u
}
@@ -322,6 +313,9 @@ func editUserCommon(ctx *context.Context) {
ctx.Data["DisableMigrations"] = setting.Repository.DisableMigrations
ctx.Data["AllowedUserVisibilityModes"] = setting.Service.AllowedUserVisibilityModesSlice.ToVisibleTypeSlice()
ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
+ ctx.Data["MaxAvatarFileSize"] = setting.Avatar.MaxFileSize
+ ctx.Data["MaxAvatarWidth"] = setting.Avatar.MaxWidth
+ ctx.Data["MaxAvatarHeight"] = setting.Avatar.MaxHeight
}
// EditUser show editing user page
@@ -416,7 +410,7 @@ func EditUserPost(ctx *context.Context) {
if form.Email != "" {
if err := user_service.AdminAddOrSetPrimaryEmailAddress(ctx, u, form.Email); err != nil {
switch {
- case validation.IsErrEmailCharIsNotSupported(err), validation.IsErrEmailInvalid(err):
+ case validation.IsErrEmailInvalid(err):
ctx.Data["Err_Email"] = true
ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplUserEdit, &form)
case user_model.IsErrEmailAlreadyUsed(err):
diff --git a/routers/web/auth/2fa.go b/routers/web/auth/2fa.go
index 7acf9a87d3..ff769ffd5d 100644
--- a/routers/web/auth/2fa.go
+++ b/routers/web/auth/2fa.go
@@ -133,11 +133,7 @@ func TwoFactorScratchPost(ctx *context.Context) {
// Validate the passcode with the stored TOTP secret.
if twofa.VerifyScratchToken(form.Token) {
// Invalidate the scratch token.
- _, err = twofa.GenerateScratchToken()
- if err != nil {
- ctx.ServerError("UserSignIn", err)
- return
- }
+ twofa.GenerateScratchToken()
if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
ctx.ServerError("UserSignIn", err)
return
diff --git a/routers/web/auth/auth.go b/routers/web/auth/auth.go
index 64006eeae8..dbb6665398 100644
--- a/routers/web/auth/auth.go
+++ b/routers/web/auth/auth.go
@@ -512,7 +512,8 @@ func createAndHandleCreatedUser(ctx *context.Context, tpl base.TplName, form any
func createUserInContext(ctx *context.Context, tpl base.TplName, form any, u *user_model.User, overwrites *user_model.CreateUserOverwriteOptions, gothUser *goth.User, allowLink bool) (ok bool) {
if err := user_model.CreateUser(ctx, u, overwrites); err != nil {
if allowLink && (user_model.IsErrUserAlreadyExist(err) || user_model.IsErrEmailAlreadyUsed(err)) {
- if setting.OAuth2Client.AccountLinking == setting.OAuth2AccountLinkingAuto {
+ switch setting.OAuth2Client.AccountLinking {
+ case setting.OAuth2AccountLinkingAuto:
var user *user_model.User
user = &user_model.User{Name: u.Name}
hasUser, err := user_model.GetUser(ctx, user)
@@ -528,7 +529,7 @@ func createUserInContext(ctx *context.Context, tpl base.TplName, form any, u *us
// TODO: probably we should respect 'remember' user's choice...
linkAccount(ctx, user, *gothUser, true)
return false // user is already created here, all redirects are handled
- } else if setting.OAuth2Client.AccountLinking == setting.OAuth2AccountLinkingLogin {
+ case setting.OAuth2AccountLinkingLogin:
showLinkingLogin(ctx, *gothUser)
return false // user will be created only after linking login
}
@@ -551,9 +552,6 @@ func createUserInContext(ctx *context.Context, tpl base.TplName, form any, u *us
case user_model.IsErrCooldownPeriod(err):
ctx.Data["Err_UserName"] = true
ctx.RenderWithErr(ctx.Locale.Tr("form.username_claiming_cooldown", err.(user_model.ErrCooldownPeriod).ExpireTime.Format(time.RFC1123Z)), tpl, form)
- case validation.IsErrEmailCharIsNotSupported(err):
- ctx.Data["Err_Email"] = true
- ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form)
case validation.IsErrEmailInvalid(err):
ctx.Data["Err_Email"] = true
ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tpl, form)
@@ -766,7 +764,7 @@ func ActivatePost(ctx *context.Context) {
ctx.HTML(http.StatusOK, TplActivate)
return
}
- if !user.ValidatePassword(password) {
+ if !user.ValidatePassword(ctx, password) {
ctx.Data["IsPasswordInvalid"] = true
ctx.HTML(http.StatusOK, TplActivate)
return
@@ -783,11 +781,7 @@ func ActivatePost(ctx *context.Context) {
func handleAccountActivation(ctx *context.Context, user *user_model.User) {
user.IsActive = true
- var err error
- if user.Rands, err = user_model.GetUserSalt(); err != nil {
- ctx.ServerError("UpdateUser", err)
- return
- }
+ user.Rands = user_model.GetUserSalt()
if err := user_model.UpdateUserCols(ctx, user, "is_active", "rands"); err != nil {
if user_model.IsErrUserNotExist(err) {
ctx.NotFound("UpdateUserCols", err)
diff --git a/routers/web/auth/linkaccount.go b/routers/web/auth/linkaccount.go
index fbf03ca475..2bba614d8c 100644
--- a/routers/web/auth/linkaccount.go
+++ b/routers/web/auth/linkaccount.go
@@ -157,7 +157,7 @@ func linkAccount(ctx *context.Context, u *user_model.User, gothUser goth.User, r
// We deliberately ignore the skip local 2fa setting here because we are linking to a previous user here
hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
if err != nil {
- ctx.ServerError("UserLinkAccount", err)
+ ctx.ServerError("HasTwoFactorByUID", err)
return
}
diff --git a/routers/web/auth/oauth.go b/routers/web/auth/oauth.go
index aa599bd252..e8e5d2c54b 100644
--- a/routers/web/auth/oauth.go
+++ b/routers/web/auth/oauth.go
@@ -225,7 +225,7 @@ func newAccessTokenResponse(ctx go_context.Context, grant *auth.OAuth2Grant, ser
idToken := &oauth2.OIDCToken{
RegisteredClaims: jwt.RegisteredClaims{
ExpiresAt: jwt.NewNumericDate(expirationDate.AsTime()),
- Issuer: setting.AppURL,
+ Issuer: strings.TrimSuffix(setting.AppURL, "/"),
Audience: []string{app.ClientID},
Subject: fmt.Sprint(grant.UserID),
},
@@ -409,7 +409,7 @@ func IntrospectOAuth(ctx *context.Context) {
if err == nil && app != nil {
response.Active = true
response.Scope = grant.Scope
- response.Issuer = setting.AppURL
+ response.Issuer = strings.TrimSuffix(setting.AppURL, "/")
response.Audience = []string{app.ClientID}
response.Subject = fmt.Sprint(grant.UserID)
}
@@ -669,6 +669,7 @@ func GrantApplicationOAuth(ctx *context.Context) {
// OIDCWellKnown generates JSON so OIDC clients know Gitea's capabilities
func OIDCWellKnown(ctx *context.Context) {
ctx.Data["SigningKey"] = oauth2.DefaultSigningKey
+ ctx.Data["Issuer"] = strings.TrimSuffix(setting.AppURL, "/")
ctx.JSONTemplate("user/auth/oidc_wellknown")
}
diff --git a/routers/web/auth/oauth_test.go b/routers/web/auth/oauth_test.go
index 6275d63382..9782711dd0 100644
--- a/routers/web/auth/oauth_test.go
+++ b/routers/web/auth/oauth_test.go
@@ -51,6 +51,7 @@ func TestNewAccessTokenResponse_OIDCToken(t *testing.T) {
// Scopes: openid
oidcToken := createAndParseToken(t, grants[0])
+ assert.Equal(t, "https://try.gitea.io", oidcToken.RegisteredClaims.Issuer)
assert.Empty(t, oidcToken.Name)
assert.Empty(t, oidcToken.PreferredUsername)
assert.Empty(t, oidcToken.Profile)
@@ -67,11 +68,12 @@ func TestNewAccessTokenResponse_OIDCToken(t *testing.T) {
// Scopes: openid profile email
oidcToken = createAndParseToken(t, grants[0])
+ assert.Equal(t, "https://try.gitea.io", oidcToken.RegisteredClaims.Issuer)
assert.Equal(t, "User Five", oidcToken.Name)
assert.Equal(t, "user5", oidcToken.PreferredUsername)
assert.Equal(t, "https://try.gitea.io/user5", oidcToken.Profile)
assert.Equal(t, "https://try.gitea.io/assets/img/avatar_default.png", oidcToken.Picture)
- assert.Equal(t, "", oidcToken.Website)
+ assert.Empty(t, oidcToken.Website)
assert.Equal(t, timeutil.TimeStamp(0), oidcToken.UpdatedAt)
assert.Equal(t, "user5@example.com", oidcToken.Email)
assert.True(t, oidcToken.EmailVerified)
diff --git a/routers/web/auth/openid.go b/routers/web/auth/openid.go
index b12dea84ea..fcb2155953 100644
--- a/routers/web/auth/openid.go
+++ b/routers/web/auth/openid.go
@@ -4,6 +4,7 @@
package auth
import (
+ "errors"
"fmt"
"net/http"
"net/url"
@@ -55,13 +56,13 @@ func allowedOpenIDURI(uri string) (err error) {
}
}
// must match one of these or be refused
- return fmt.Errorf("URI not allowed by whitelist")
+ return errors.New("URI not allowed by whitelist")
}
// A blacklist match explicitly forbids
for _, pat := range setting.Service.OpenIDBlacklist {
if pat.MatchString(uri) {
- return fmt.Errorf("URI forbidden by blacklist")
+ return errors.New("URI forbidden by blacklist")
}
}
diff --git a/routers/web/auth/password.go b/routers/web/auth/password.go
index 82c2d4e9d3..cb6b22e5b7 100644
--- a/routers/web/auth/password.go
+++ b/routers/web/auth/password.go
@@ -242,12 +242,8 @@ func ResetPasswdPost(ctx *context.Context) {
if regenerateScratchToken {
// Invalidate the scratch token.
- _, err := twofa.GenerateScratchToken()
- if err != nil {
- ctx.ServerError("UserSignIn", err)
- return
- }
- if err = auth.UpdateTwoFactor(ctx, twofa); err != nil {
+ twofa.GenerateScratchToken()
+ if err := auth.UpdateTwoFactor(ctx, twofa); err != nil {
ctx.ServerError("UserSignIn", err)
return
}
diff --git a/routers/web/devtest/devtest.go b/routers/web/devtest/devtest.go
index 37496ca117..9b5804b976 100644
--- a/routers/web/devtest/devtest.go
+++ b/routers/web/devtest/devtest.go
@@ -1,14 +1,18 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package devtest
import (
+ "errors"
"net/http"
"path"
"strings"
"time"
+ "forgejo.org/models/asymkey"
+ "forgejo.org/models/user"
"forgejo.org/modules/base"
"forgejo.org/modules/templates"
"forgejo.org/services/context"
@@ -42,6 +46,17 @@ func FetchActionTest(ctx *context.Context) {
ctx.JSONRedirect("")
}
+func ErrorPage(ctx *context.Context) {
+ if ctx.Params("errcode") == "404" {
+ ctx.NotFound("Example error", errors.New("Example error"))
+ return
+ } else if ctx.Params("errcode") == "413" {
+ ctx.HTML(http.StatusRequestEntityTooLarge, base.TplName("status/413"))
+ return
+ }
+ ctx.ServerError("Example error", errors.New("Example error"))
+}
+
func Tmpl(ctx *context.Context) {
now := time.Now()
ctx.Data["TimeNow"] = now
@@ -52,6 +67,19 @@ func Tmpl(ctx *context.Context) {
ctx.Data["TimePast1y"] = now.Add(-1 * 366 * 86400 * time.Second)
ctx.Data["TimeFuture1y"] = now.Add(1 * 366 * 86400 * time.Second)
+ userNonZero := &user.User{ID: 1}
+ ctx.Data["TrustedVerif"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userNonZero, TrustStatus: "trusted"}
+ ctx.Data["UntrustedVerif"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userNonZero, TrustStatus: "untrusted"}
+ ctx.Data["UnmatchedVerif"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userNonZero, TrustStatus: ""}
+ ctx.Data["WarnVerif"] = &asymkey.ObjectVerification{Verified: false, Warning: true, Reason: asymkey.NotSigned, SigningUser: userNonZero}
+ ctx.Data["UnknownVerif"] = &asymkey.ObjectVerification{Verified: false, Warning: false, Reason: asymkey.NotSigned, SigningUser: userNonZero}
+ userUnknown := &user.User{ID: 0}
+ ctx.Data["TrustedVerifUnk"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userUnknown, TrustStatus: "trusted"}
+ ctx.Data["UntrustedVerifUnk"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userUnknown, TrustStatus: "untrusted"}
+ ctx.Data["UnmatchedVerifUnk"] = &asymkey.ObjectVerification{Verified: true, Reason: asymkey.NotSigned, SigningUser: userUnknown, TrustStatus: ""}
+ ctx.Data["WarnVerifUnk"] = &asymkey.ObjectVerification{Verified: false, Warning: true, Reason: asymkey.NotSigned, SigningUser: userUnknown}
+ ctx.Data["UnknownVerifUnk"] = &asymkey.ObjectVerification{Verified: false, Warning: false, Reason: asymkey.NotSigned, SigningUser: userUnknown}
+
if ctx.Req.Method == "POST" {
_ = ctx.Req.ParseForm()
ctx.Flash.Info("form: "+ctx.Req.Method+" "+ctx.Req.RequestURI+"<br>"+
diff --git a/routers/web/explore/user.go b/routers/web/explore/user.go
index e349bb1e92..3d4dbcd104 100644
--- a/routers/web/explore/user.go
+++ b/routers/web/explore/user.go
@@ -20,7 +20,7 @@ import (
)
const (
- // tplExploreUsers explore users page template
+ // `tplExploreUsers` explore users page template.
tplExploreUsers base.TplName = "explore/users"
)
@@ -30,9 +30,9 @@ func isKeywordValid(keyword string) bool {
return !bytes.Contains([]byte(keyword), nullByte)
}
-// RenderUserSearch render user search page
+// `RenderUserSearch` renders the user search page.
func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions, tplName base.TplName) {
- // Sitemap index for sitemap paths
+ // Sitemap index for sitemap paths.
opts.Page = int(ctx.ParamsInt64("idx"))
isSitemap := ctx.Params("idx") != ""
if opts.Page <= 1 {
@@ -47,42 +47,24 @@ func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions,
}
var (
- users []*user_model.User
- count int64
- err error
- orderBy db.SearchOrderBy
+ users []*user_model.User
+ count int64
+ err error
)
- // we can not set orderBy to `models.SearchOrderByXxx`, because there may be a JOIN in the statement, different tables may have the same name columns
-
sortOrder := ctx.FormString("sort")
if sortOrder == "" {
sortOrder = setting.UI.ExploreDefaultSort
}
ctx.Data["SortType"] = sortOrder
- switch sortOrder {
- case "newest":
- orderBy = "`user`.id DESC"
- case "oldest":
- orderBy = "`user`.id ASC"
- case "leastupdate":
- orderBy = "`user`.updated_unix ASC"
- case "reversealphabetically":
- orderBy = "`user`.name DESC"
- case "lastlogin":
- orderBy = "`user`.last_login_unix ASC"
- case "reverselastlogin":
- orderBy = "`user`.last_login_unix DESC"
- case "alphabetically":
- orderBy = "`user`.name ASC"
- case "recentupdate":
- fallthrough
- default:
- // in case the sortType is not valid, we set it to recentupdate
+ orderBy := MapSortOrder(sortOrder)
+
+ if orderBy == "" {
+ // In case the `sortType` is not valid, we set it to `recentupdate`.
sortOrder = "recentupdate"
ctx.Data["SortType"] = "recentupdate"
- orderBy = "`user`.updated_unix DESC"
+ orderBy = MapSortOrder(sortOrder)
}
if opts.SupportedSortOrders != nil && !opts.SupportedSortOrders.Contains(sortOrder) {
@@ -130,7 +112,7 @@ func RenderUserSearch(ctx *context.Context, opts *user_model.SearchUserOptions,
ctx.HTML(http.StatusOK, tplName)
}
-// Users render explore users page
+// Users renders the explore users page.
func Users(ctx *context.Context) {
if setting.Service.Explore.DisableUsersPage {
ctx.Redirect(setting.AppSubURL + "/explore")
@@ -169,3 +151,37 @@ func Users(ctx *context.Context) {
SupportedSortOrders: supportedSortOrders,
}, tplExploreUsers)
}
+
+// Maps a sort query to a database search order.
+//
+// We cannot use `models.SearchOrderByXxx` because there may be a JOIN in the statement, and different tables may have columns with the same name.
+func MapSortOrder(sortOrder string) db.SearchOrderBy {
+ switch sortOrder {
+ case "newest":
+ return "`user`.created_unix DESC"
+
+ case "oldest":
+ return "`user`.created_unix ASC"
+
+ case "leastupdate":
+ return "`user`.updated_unix ASC"
+
+ case "reversealphabetically":
+ return "`user`.name DESC"
+
+ case "lastlogin":
+ return "`user`.last_login_unix ASC"
+
+ case "reverselastlogin":
+ return "`user`.last_login_unix DESC"
+
+ case "alphabetically":
+ return "`user`.name ASC"
+
+ case "recentupdate":
+ return "`user`.updated_unix DESC"
+
+ default:
+ return ""
+ }
+}
diff --git a/routers/web/explore/user_test.go b/routers/web/explore/user_test.go
new file mode 100644
index 0000000000..04573a149f
--- /dev/null
+++ b/routers/web/explore/user_test.go
@@ -0,0 +1,23 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package explore
+
+import (
+ "testing"
+
+ "forgejo.org/models/db"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestMapSortOrder(t *testing.T) {
+ assert.Equal(t, MapSortOrder("newest"), db.SearchOrderBy("`user`.created_unix DESC"))
+ assert.Equal(t, MapSortOrder("oldest"), db.SearchOrderBy("`user`.created_unix ASC"))
+ assert.Equal(t, MapSortOrder("leastupdate"), db.SearchOrderBy("`user`.updated_unix ASC"))
+ assert.Equal(t, MapSortOrder("reversealphabetically"), db.SearchOrderBy("`user`.name DESC"))
+ assert.Equal(t, MapSortOrder("lastlogin"), db.SearchOrderBy("`user`.last_login_unix ASC"))
+ assert.Equal(t, MapSortOrder("reverselastlogin"), db.SearchOrderBy("`user`.last_login_unix DESC"))
+ assert.Equal(t, MapSortOrder("alphabetically"), db.SearchOrderBy("`user`.name ASC"))
+ assert.Equal(t, MapSortOrder("recentupdate"), db.SearchOrderBy("`user`.updated_unix DESC"))
+}
diff --git a/routers/web/feed/convert.go b/routers/web/feed/convert.go
index 24532334ea..7b09c92ee5 100644
--- a/routers/web/feed/convert.go
+++ b/routers/web/feed/convert.go
@@ -1,4 +1,5 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package feed
@@ -24,7 +25,7 @@ import (
"forgejo.org/services/context"
"github.com/gorilla/feeds"
- "github.com/jaytaylor/html2text"
+ "github.com/inbucket/html2text"
)
func toBranchLink(ctx *context.Context, act *activities_model.Action) string {
@@ -209,7 +210,7 @@ func feedActionsToFeedItems(ctx *context.Context, actions activities_model.Actio
{
switch act.OpType {
case activities_model.ActionCommitRepo, activities_model.ActionMirrorSyncPush:
- push := templates.ActionContent2Commits(act)
+ push := templates.ActionContent2Commits(ctx, act)
for _, commit := range push.Commits {
if len(desc) != 0 {
diff --git a/routers/web/home.go b/routers/web/home.go
index e0a466a81c..bd9942748a 100644
--- a/routers/web/home.go
+++ b/routers/web/home.go
@@ -1,5 +1,6 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package web
@@ -112,9 +113,3 @@ func HomeSitemap(ctx *context.Context) {
log.Error("Failed writing sitemap: %v", err)
}
}
-
-// NotFound render 404 page
-func NotFound(ctx *context.Context) {
- ctx.Data["Title"] = "Page Not Found"
- ctx.NotFound("home.NotFound", nil)
-}
diff --git a/routers/web/misc/misc.go b/routers/web/misc/misc.go
index 306b15e3d5..22fdccf79f 100644
--- a/routers/web/misc/misc.go
+++ b/routers/web/misc/misc.go
@@ -7,7 +7,6 @@ import (
"net/http"
"path"
- "forgejo.org/modules/git"
"forgejo.org/modules/httpcache"
"forgejo.org/modules/log"
"forgejo.org/modules/setting"
@@ -15,12 +14,8 @@ import (
)
func SSHInfo(rw http.ResponseWriter, req *http.Request) {
- if !git.SupportProcReceive {
- rw.WriteHeader(http.StatusNotFound)
- return
- }
rw.Header().Set("content-type", "text/json;charset=UTF-8")
- _, err := rw.Write([]byte(`{"type":"gitea","version":1}`))
+ _, err := rw.Write([]byte(`{"type":"agit","version":1}`))
if err != nil {
log.Error("fail to write result: err: %v", err)
rw.WriteHeader(http.StatusInternalServerError)
@@ -33,17 +28,96 @@ func DummyOK(w http.ResponseWriter, req *http.Request) {
w.WriteHeader(http.StatusOK)
}
-func RobotsTxt(w http.ResponseWriter, req *http.Request) {
- robotsTxt := util.FilePathJoinAbs(setting.CustomPath, "public/robots.txt")
- if ok, _ := util.IsExist(robotsTxt); !ok {
- robotsTxt = util.FilePathJoinAbs(setting.CustomPath, "robots.txt") // the legacy "robots.txt"
- }
- httpcache.SetCacheControlInHeader(w.Header(), setting.StaticCacheTime)
- http.ServeFile(w, req, robotsTxt)
-}
-
func StaticRedirect(target string) func(w http.ResponseWriter, req *http.Request) {
return func(w http.ResponseWriter, req *http.Request) {
http.Redirect(w, req, path.Join(setting.StaticURLPrefix, target), http.StatusMovedPermanently)
}
}
+
+var defaultRobotsTxt = []byte(`# The default Forgejo robots.txt
+# For more information: https://forgejo.org/docs/latest/admin/search-engines-indexation/
+
+User-agent: *
+Disallow: /api/
+Disallow: /avatars/
+Disallow: /user/
+Disallow: /swagger.*.json
+Disallow: /explore/*?*
+
+Disallow: /repo/create
+Disallow: /repo/migrate
+Disallow: /org/create
+Disallow: /*/*/fork
+
+Disallow: /*/*/watchers
+Disallow: /*/*/stargazers
+Disallow: /*/*/forks
+
+Disallow: /*/*/src/
+Disallow: /*/*/blame/
+Disallow: /*/*/commit/
+Disallow: /*/*/commits/
+Disallow: /*/*/raw/
+Disallow: /*/*/media/
+Disallow: /*/*/tags
+Disallow: /*/*/graph
+Disallow: /*/*/branches
+Disallow: /*/*/compare
+Disallow: /*/*/lastcommit/
+Disallow: /*/*/rss/branch/
+Disallow: /*/*/atom/branch/
+
+Disallow: /*/*/activity
+Disallow: /*/*/activity_author_data
+
+Disallow: /*/*/actions
+Disallow: /*/*/projects
+Disallow: /*/*/labels
+Disallow: /*/*/milestones
+
+Disallow: /*/*/find/
+Disallow: /*/*/tree-list/
+Disallow: /*/*/search/
+Disallow: /*/-/code
+
+Disallow: /*/*/issues/new
+Disallow: /*/*/pulls/*/files
+Disallow: /*/*/pulls/*/commits
+
+Disallow: /attachments/
+Disallow: /*/*/attachments/
+Disallow: /*/*/issues/*/attachments/
+Disallow: /*/*/pulls/*/attachments/
+Disallow: /*/*/releases/attachments
+Disallow: /*/*/releases/download
+
+Disallow: /*/*/archive/
+Disallow: /*.bundle$
+Disallow: /*.patch$
+Disallow: /*.diff$
+Disallow: /*.atom$
+Disallow: /*.rss$
+
+Disallow: /*lang=*
+Disallow: /*redirect_to=*
+Disallow: /*tab=*
+Disallow: /*q=*
+Disallow: /*sort=*
+Disallow: /*repo-search-archived=*
+`)
+
+func RobotsTxt(w http.ResponseWriter, req *http.Request) {
+ httpcache.SetCacheControlInHeader(w.Header(), setting.StaticCacheTime)
+ w.Header().Set("Content-Type", "text/plain")
+
+ robotsTxt := util.FilePathJoinAbs(setting.CustomPath, "public/robots.txt")
+ if ok, _ := util.IsExist(robotsTxt); ok {
+ http.ServeFile(w, req, robotsTxt)
+ return
+ }
+
+ _, err := w.Write(defaultRobotsTxt)
+ if err != nil {
+ log.Error("failed to write robots.txt: %v", err)
+ }
+}
diff --git a/routers/web/moderation/report.go b/routers/web/moderation/report.go
new file mode 100644
index 0000000000..39ca9e8824
--- /dev/null
+++ b/routers/web/moderation/report.go
@@ -0,0 +1,125 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package moderation
+
+import (
+ "errors"
+ "net/http"
+
+ "forgejo.org/models/moderation"
+ "forgejo.org/modules/base"
+ "forgejo.org/modules/log"
+ "forgejo.org/modules/web"
+ "forgejo.org/services/context"
+ "forgejo.org/services/forms"
+ moderation_service "forgejo.org/services/moderation"
+)
+
+const (
+ tplSubmitAbuseReport base.TplName = "moderation/new_abuse_report"
+)
+
+// NewReport renders the page for new abuse reports.
+func NewReport(ctx *context.Context) {
+ contentID := ctx.FormInt64("id")
+ if contentID <= 0 {
+ setMinimalContextData(ctx)
+ ctx.RenderWithErr(ctx.Tr("moderation.report_abuse_form.invalid"), tplSubmitAbuseReport, nil)
+ log.Warn("The content ID is expected to be an integer greater than 0; the provided value is %s.", ctx.FormString("id"))
+ return
+ }
+
+ contentTypeString := ctx.FormString("type")
+ var contentType moderation.ReportedContentType
+ switch contentTypeString {
+ case "user", "org":
+ contentType = moderation.ReportedContentTypeUser
+ case "repo":
+ contentType = moderation.ReportedContentTypeRepository
+ case "issue", "pull":
+ contentType = moderation.ReportedContentTypeIssue
+ case "comment":
+ contentType = moderation.ReportedContentTypeComment
+ default:
+ setMinimalContextData(ctx)
+ ctx.RenderWithErr(ctx.Tr("moderation.report_abuse_form.invalid"), tplSubmitAbuseReport, nil)
+ log.Warn("The provided content type `%s` is not among the expected values.", contentTypeString)
+ return
+ }
+
+ if moderation.AlreadyReportedByAndOpen(ctx, ctx.Doer.ID, contentType, contentID) {
+ setMinimalContextData(ctx)
+ ctx.RenderWithErr(ctx.Tr("moderation.report_abuse_form.already_reported"), tplSubmitAbuseReport, nil)
+ return
+ }
+
+ setContextDataAndRender(ctx, contentType, contentID)
+}
+
+// setMinimalContextData adds minimal values (Title and CancelLink) into context data.
+func setMinimalContextData(ctx *context.Context) {
+ ctx.Data["Title"] = ctx.Tr("moderation.report_abuse")
+ ctx.Data["CancelLink"] = ctx.Doer.DashboardLink()
+}
+
+// setContextDataAndRender adds some values into context data and renders the new abuse report page.
+func setContextDataAndRender(ctx *context.Context, contentType moderation.ReportedContentType, contentID int64) {
+ setMinimalContextData(ctx)
+ ctx.Data["ContentID"] = contentID
+ ctx.Data["ContentType"] = contentType
+ ctx.Data["AbuseCategories"] = moderation.GetAbuseCategoriesList()
+ ctx.HTML(http.StatusOK, tplSubmitAbuseReport)
+}
+
+// CreatePost handles the POST for creating a new abuse report.
+func CreatePost(ctx *context.Context) {
+ form := *web.GetForm(ctx).(*forms.ReportAbuseForm)
+
+ if form.ContentID <= 0 || !form.ContentType.IsValid() {
+ setMinimalContextData(ctx)
+ ctx.RenderWithErr(ctx.Tr("moderation.report_abuse_form.invalid"), tplSubmitAbuseReport, nil)
+ return
+ }
+
+ if ctx.HasError() {
+ setContextDataAndRender(ctx, form.ContentType, form.ContentID)
+ return
+ }
+
+ can, err := moderation_service.CanReport(*ctx, ctx.Doer, form.ContentType, form.ContentID)
+ if err != nil {
+ if errors.Is(err, moderation_service.ErrContentDoesNotExist) || errors.Is(err, moderation_service.ErrDoerNotAllowed) {
+ ctx.Flash.Error(ctx.Tr("moderation.report_abuse_form.invalid"))
+ ctx.Redirect(ctx.Doer.DashboardLink())
+ } else {
+ ctx.ServerError("Failed to check if user can report content", err)
+ }
+ return
+ } else if !can {
+ ctx.Flash.Error(ctx.Tr("moderation.report_abuse_form.invalid"))
+ ctx.Redirect(ctx.Doer.DashboardLink())
+ return
+ }
+
+ report := moderation.AbuseReport{
+ ReporterID: ctx.Doer.ID,
+ ContentType: form.ContentType,
+ ContentID: form.ContentID,
+ Category: form.AbuseCategory,
+ Remarks: form.Remarks,
+ }
+
+ if err := moderation.ReportAbuse(ctx, &report); err != nil {
+ if errors.Is(err, moderation.ErrSelfReporting) {
+ ctx.Flash.Error(ctx.Tr("moderation.reporting_failed", err))
+ ctx.Redirect(ctx.Doer.DashboardLink())
+ } else {
+ ctx.ServerError("Failed to save new abuse report", err)
+ }
+ return
+ }
+
+ ctx.Flash.Success(ctx.Tr("moderation.reported_thank_you"))
+ ctx.Redirect(ctx.Doer.DashboardLink())
+}
diff --git a/routers/web/org/home.go b/routers/web/org/home.go
index a3823565ed..8f14f8899c 100644
--- a/routers/web/org/home.go
+++ b/routers/web/org/home.go
@@ -175,10 +175,12 @@ func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repositor
return
}
- if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
- log.Error("failed to GetBlobContent: %v", err)
+ if rc, _, err := profileReadme.NewTruncatedReader(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("failed to NewTruncatedReader: %v", err)
} else {
- if profileContent, err := markdown.RenderString(&markup.RenderContext{
+ defer rc.Close()
+
+ if profileContent, err := markdown.RenderReader(&markup.RenderContext{
Ctx: ctx,
GitRepo: profileGitRepo,
Links: markup.Links{
@@ -188,7 +190,7 @@ func prepareOrgProfileReadme(ctx *context.Context, profileGitRepo *git.Repositor
BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
},
Metas: map[string]string{"mode": "document"},
- }, bytes); err != nil {
+ }, rc); err != nil {
log.Error("failed to RenderString: %v", err)
} else {
ctx.Data["ProfileReadme"] = profileContent
diff --git a/routers/web/org/members.go b/routers/web/org/members.go
index 51ac566e1a..65e2b032e8 100644
--- a/routers/web/org/members.go
+++ b/routers/web/org/members.go
@@ -60,8 +60,8 @@ func Members(ctx *context.Context) {
}
pager := context.NewPagination(int(total), setting.UI.MembersPagingNum, page, 5)
- opts.ListOptions.Page = page
- opts.ListOptions.PageSize = setting.UI.MembersPagingNum
+ opts.Page = page
+ opts.PageSize = setting.UI.MembersPagingNum
members, membersIsPublic, err := organization.FindOrgMembers(ctx, opts)
if err != nil {
ctx.ServerError("GetMembers", err)
diff --git a/routers/web/org/org.go b/routers/web/org/org.go
index e9907c04af..f3d23df6a5 100644
--- a/routers/web/org/org.go
+++ b/routers/web/org/org.go
@@ -27,11 +27,14 @@ const (
// Create render the page for create organization
func Create(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("new_org.title")
- ctx.Data["DefaultOrgVisibilityMode"] = setting.Service.DefaultOrgVisibilityMode
if !ctx.Doer.CanCreateOrganization() {
ctx.ServerError("Not allowed", errors.New(ctx.Locale.TrString("org.form.create_org_not_allowed")))
return
}
+
+ ctx.Data["visibility"] = setting.Service.DefaultOrgVisibilityMode
+ ctx.Data["repo_admin_change_team_access"] = true
+
ctx.HTML(http.StatusOK, tplCreateOrg)
}
diff --git a/routers/web/org/setting.go b/routers/web/org/setting.go
index 284f406413..c83242754b 100644
--- a/routers/web/org/setting.go
+++ b/routers/web/org/setting.go
@@ -50,6 +50,9 @@ func Settings(ctx *context.Context) {
ctx.Data["RepoAdminChangeTeamAccess"] = ctx.Org.Organization.RepoAdminChangeTeamAccess
ctx.Data["ContextUser"] = ctx.ContextUser
ctx.Data["CooldownPeriod"] = setting.Service.UsernameCooldownPeriod
+ ctx.Data["MaxAvatarFileSize"] = setting.Avatar.MaxFileSize
+ ctx.Data["MaxAvatarWidth"] = setting.Avatar.MaxWidth
+ ctx.Data["MaxAvatarHeight"] = setting.Avatar.MaxHeight
err := shared_user.LoadHeaderCount(ctx)
if err != nil {
diff --git a/routers/web/repo/action_aggregator_test.go b/routers/web/repo/action_aggregator_test.go
index 8bade074ca..94e6d506c5 100644
--- a/routers/web/repo/action_aggregator_test.go
+++ b/routers/web/repo/action_aggregator_test.go
@@ -187,12 +187,12 @@ func (kase *testCase) doTest(t *testing.T) {
if len(after) != len(issue.Comments) {
t.Logf("Expected %v comments, got %v", len(after), len(issue.Comments))
- t.Logf("Comments got after combination:")
+ t.Log("Comments got after combination:")
for c := 0; c < len(issue.Comments); c++ {
cmt := issue.Comments[c]
t.Logf("%v %v %v\n", cmt.Type, cmt.CreatedUnix, cmt.Content)
}
- assert.EqualValues(t, len(after), len(issue.Comments))
+ assert.Len(t, issue.Comments, len(after))
t.Fail()
return
}
@@ -222,7 +222,7 @@ func (kase *testCase) doTest(t *testing.T) {
l.AssigneeTeamID = 0
}
- assert.EqualValues(t, (after)[c], issue.Comments[c],
+ assert.Equal(t, (after)[c], issue.Comments[c],
"Comment %v is not equal", c,
)
}
diff --git a/routers/web/repo/actions/actions.go b/routers/web/repo/actions/actions.go
index 0a63f566e0..7aa52ddd4c 100644
--- a/routers/web/repo/actions/actions.go
+++ b/routers/web/repo/actions/actions.go
@@ -31,8 +31,9 @@ import (
)
const (
- tplListActions base.TplName = "repo/actions/list"
- tplViewActions base.TplName = "repo/actions/view"
+ tplListActions base.TplName = "repo/actions/list"
+ tplListActionsInner base.TplName = "repo/actions/list_inner"
+ tplViewActions base.TplName = "repo/actions/view"
)
type Workflow struct {
@@ -67,6 +68,8 @@ func List(ctx *context.Context) {
curWorkflow := ctx.FormString("workflow")
ctx.Data["CurWorkflow"] = curWorkflow
+ listInner := ctx.FormBool("list_inner")
+
var workflows []Workflow
if empty, err := ctx.Repo.GitRepo.IsEmpty(); err != nil {
ctx.ServerError("IsEmpty", err)
@@ -250,7 +253,11 @@ func List(ctx *context.Context) {
ctx.Data["Page"] = pager
ctx.Data["HasWorkflowsOrRuns"] = len(workflows) > 0 || len(runs) > 0
- ctx.HTML(http.StatusOK, tplListActions)
+ if listInner {
+ ctx.HTML(http.StatusOK, tplListActionsInner)
+ } else {
+ ctx.HTML(http.StatusOK, tplListActions)
+ }
}
// loadIsRefDeleted loads the IsRefDeleted field for each run in the list.
diff --git a/routers/web/repo/actions/view.go b/routers/web/repo/actions/view.go
index 2d009c5720..260468f207 100644
--- a/routers/web/repo/actions/view.go
+++ b/routers/web/repo/actions/view.go
@@ -383,7 +383,7 @@ func Rerun(ctx *context_module.Context) {
run.PreviousDuration = run.Duration()
run.Started = 0
run.Stopped = 0
- if err := actions_model.UpdateRun(ctx, run, "started", "stopped", "previous_duration"); err != nil {
+ if err := actions_service.UpdateRun(ctx, run, "started", "stopped", "previous_duration"); err != nil {
ctx.Error(http.StatusInternalServerError, err.Error())
return
}
@@ -436,7 +436,7 @@ func rerunJob(ctx *context_module.Context, job *actions_model.ActionRunJob, shou
job.Stopped = 0
if err := db.WithTx(ctx, func(ctx context.Context) error {
- _, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": status}, "task_id", "status", "started", "stopped")
+ _, err := actions_service.UpdateRunJob(ctx, job, builder.Eq{"status": status}, "task_id", "status", "started", "stopped")
return err
}); err != nil {
return err
@@ -512,16 +512,16 @@ func Cancel(ctx *context_module.Context) {
if job.TaskID == 0 {
job.Status = actions_model.StatusCancelled
job.Stopped = timeutil.TimeStampNow()
- n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
+ n, err := actions_service.UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
if err != nil {
return err
}
if n == 0 {
- return fmt.Errorf("job has changed, try again")
+ return errors.New("job has changed, try again")
}
continue
}
- if err := actions_model.StopTask(ctx, job.TaskID, actions_model.StatusCancelled); err != nil {
+ if err := actions_service.StopTask(ctx, job.TaskID, actions_model.StatusCancelled); err != nil {
return err
}
}
@@ -549,13 +549,13 @@ func Approve(ctx *context_module.Context) {
if err := db.WithTx(ctx, func(ctx context.Context) error {
run.NeedApproval = false
run.ApprovedBy = doer.ID
- if err := actions_model.UpdateRun(ctx, run, "need_approval", "approved_by"); err != nil {
+ if err := actions_service.UpdateRun(ctx, run, "need_approval", "approved_by"); err != nil {
return err
}
for _, job := range jobs {
if len(job.Needs) == 0 && job.Status.IsBlocked() {
job.Status = actions_model.StatusWaiting
- _, err := actions_model.UpdateRunJob(ctx, job, nil, "status")
+ _, err := actions_service.UpdateRunJob(ctx, job, nil, "status")
if err != nil {
return err
}
diff --git a/routers/web/repo/attachment.go b/routers/web/repo/attachment.go
index e46c08fef8..5b2eaef889 100644
--- a/routers/web/repo/attachment.go
+++ b/routers/web/repo/attachment.go
@@ -106,7 +106,7 @@ func ServeAttachment(ctx *context.Context, uuid string) {
}
if repository == nil { // If not linked
- if !(ctx.IsSigned && attach.UploaderID == ctx.Doer.ID) { // We block if not the uploader
+ if !ctx.IsSigned || attach.UploaderID != ctx.Doer.ID { // We block if not the uploader
ctx.Error(http.StatusNotFound)
return
}
diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go
index ccdd59f2dd..f4cc2a2cea 100644
--- a/routers/web/repo/blame.go
+++ b/routers/web/repo/blame.go
@@ -82,19 +82,19 @@ func RefBlame(ctx *context.Context) {
return
}
- ctx.Data["NumLinesSet"] = true
- ctx.Data["NumLines"], err = blob.GetBlobLineCount()
- if err != nil {
- ctx.ServerError("GetBlobLineCount", err)
- return
- }
-
result, err := performBlame(ctx, ctx.Repo.Commit, ctx.Repo.TreePath, ctx.FormBool("bypass-blame-ignore"))
if err != nil {
ctx.ServerError("performBlame", err)
return
}
+ ctx.Data["NumLinesSet"] = true
+ numLines := 0
+ for _, p := range result.Parts {
+ numLines += len(p.Lines)
+ }
+ ctx.Data["NumLines"] = numLines
+
ctx.Data["UsesIgnoreRevs"] = result.UsesIgnoreRevs
ctx.Data["FaultyIgnoreRevsFile"] = result.FaultyIgnoreRevsFile
diff --git a/routers/web/repo/branch.go b/routers/web/repo/branch.go
index af8a838fc9..0fe52bfb48 100644
--- a/routers/web/repo/branch.go
+++ b/routers/web/repo/branch.go
@@ -70,11 +70,6 @@ func Branches(ctx *context.Context) {
ctx.ServerError("LoadBranches", err)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- for key := range commitStatuses {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
- }
- }
commitStatus := make(map[string]*git_model.CommitStatus)
for commitID, cs := range commitStatuses {
diff --git a/routers/web/repo/code_frequency.go b/routers/web/repo/code_frequency.go
index 04009b4afa..44c07e617e 100644
--- a/routers/web/repo/code_frequency.go
+++ b/routers/web/repo/code_frequency.go
@@ -34,7 +34,7 @@ func CodeFrequencyData(ctx *context.Context) {
ctx.Status(http.StatusAccepted)
return
}
- ctx.ServerError("GetCodeFrequencyData", err)
+ ctx.ServerError("GetContributorStats", err)
} else {
ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
}
diff --git a/routers/web/repo/commit.go b/routers/web/repo/commit.go
index 3cd80a6777..f3192266ad 100644
--- a/routers/web/repo/commit.go
+++ b/routers/web/repo/commit.go
@@ -16,7 +16,6 @@ import (
"forgejo.org/models/db"
git_model "forgejo.org/models/git"
repo_model "forgejo.org/models/repo"
- unit_model "forgejo.org/models/unit"
user_model "forgejo.org/models/user"
"forgejo.org/modules/base"
"forgejo.org/modules/charset"
@@ -84,7 +83,7 @@ func Commits(ctx *context.Context) {
ctx.ServerError("CommitsByRange", err)
return
}
- ctx.Data["Commits"] = processGitCommits(ctx, commits)
+ ctx.Data["Commits"] = git_model.ParseCommitsWithStatus(ctx, commits, ctx.Repo.Repository)
ctx.Data["Username"] = ctx.Repo.Owner.Name
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
@@ -202,7 +201,7 @@ func SearchCommits(ctx *context.Context) {
return
}
ctx.Data["CommitCount"] = len(commits)
- ctx.Data["Commits"] = processGitCommits(ctx, commits)
+ ctx.Data["Commits"] = git_model.ParseCommitsWithStatus(ctx, commits, ctx.Repo.Repository)
ctx.Data["Keyword"] = query
if all {
@@ -267,7 +266,7 @@ func FileHistory(ctx *context.Context) {
}
}
- ctx.Data["Commits"] = processGitCommits(ctx, commits)
+ ctx.Data["Commits"] = git_model.ParseCommitsWithStatus(ctx, commits, ctx.Repo.Repository)
ctx.Data["Username"] = ctx.Repo.Owner.Name
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
@@ -329,7 +328,7 @@ func Diff(ctx *context.Context) {
maxLines, maxFiles = -1, -1
}
- diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
+ diff, err := gitdiff.GetDiffFull(ctx, gitRepo, &gitdiff.DiffOptions{
AfterCommitID: commitID,
SkipTo: ctx.FormString("skip-to"),
MaxLines: maxLines,
@@ -375,9 +374,6 @@ func Diff(ctx *context.Context) {
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}
- if !ctx.Repo.CanRead(unit_model.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, statuses)
- }
ctx.Data["CommitStatus"] = git_model.CalcCommitStatus(statuses)
ctx.Data["CommitStatuses"] = statuses
@@ -456,20 +452,6 @@ func RawDiff(ctx *context.Context) {
}
}
-func processGitCommits(ctx *context.Context, gitCommits []*git.Commit) []*git_model.SignCommitWithStatuses {
- commits := git_model.ConvertFromGitCommit(ctx, gitCommits, ctx.Repo.Repository)
- if !ctx.Repo.CanRead(unit_model.TypeActions) {
- for _, commit := range commits {
- if commit.Status == nil {
- continue
- }
- commit.Status.HideActionsURL(ctx)
- git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
- }
- }
- return commits
-}
-
func SetCommitNotes(ctx *context.Context) {
form := web.GetForm(ctx).(*forms.CommitNotesForm)
diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go
index db65e889e0..59538d8a0e 100644
--- a/routers/web/repo/compare.go
+++ b/routers/web/repo/compare.go
@@ -312,22 +312,16 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
baseIsTag := ctx.Repo.GitRepo.IsTagExist(ci.BaseBranch)
if !baseIsCommit && !baseIsBranch && !baseIsTag {
- // Check if baseBranch is short sha commit hash
- if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(ci.BaseBranch); baseCommit != nil {
- ci.BaseBranch = baseCommit.ID.String()
- ctx.Data["BaseBranch"] = ci.BaseBranch
- baseIsCommit = true
- } else if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
+ if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
if isSameRepo {
ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch))
} else {
ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch))
}
- return nil
} else {
ctx.NotFound("IsRefExist", nil)
- return nil
}
+ return nil
}
ctx.Data["BaseIsCommit"] = baseIsCommit
ctx.Data["BaseIsBranch"] = baseIsBranch
@@ -514,15 +508,8 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
headIsBranch := ci.HeadGitRepo.IsBranchExist(ci.HeadBranch)
headIsTag := ci.HeadGitRepo.IsTagExist(ci.HeadBranch)
if !headIsCommit && !headIsBranch && !headIsTag {
- // Check if headBranch is short sha commit hash
- if headCommit, _ := ci.HeadGitRepo.GetCommit(ci.HeadBranch); headCommit != nil {
- ci.HeadBranch = headCommit.ID.String()
- ctx.Data["HeadBranch"] = ci.HeadBranch
- headIsCommit = true
- } else {
- ctx.NotFound("IsRefExist", nil)
- return nil
- }
+ ctx.NotFound("IsRefExist", nil)
+ return nil
}
ctx.Data["HeadIsCommit"] = headIsCommit
ctx.Data["HeadIsBranch"] = headIsBranch
@@ -597,7 +584,7 @@ func PrepareCompareDiff(
config := unit.PullRequestsConfig()
if !config.AutodetectManualMerge {
- allowEmptyPr := !(ci.BaseBranch == ci.HeadBranch && ctx.Repo.Repository.Name == ci.HeadRepo.Name)
+ allowEmptyPr := ci.BaseBranch != ci.HeadBranch || ctx.Repo.Repository.Name != ci.HeadRepo.Name
ctx.Data["AllowEmptyPr"] = allowEmptyPr
return !allowEmptyPr
@@ -621,7 +608,7 @@ func PrepareCompareDiff(
fileOnly := ctx.FormBool("file-only")
- diff, err := gitdiff.GetDiff(ctx, ci.HeadGitRepo,
+ diff, err := gitdiff.GetDiffFull(ctx, ci.HeadGitRepo,
&gitdiff.DiffOptions{
BeforeCommitID: beforeCommitID,
AfterCommitID: headCommitID,
@@ -654,15 +641,15 @@ func PrepareCompareDiff(
return false
}
- commits := processGitCommits(ctx, ci.CompareInfo.Commits)
+ commits := git_model.ParseCommitsWithStatus(ctx, ci.CompareInfo.Commits, ctx.Repo.Repository)
ctx.Data["Commits"] = commits
ctx.Data["CommitCount"] = len(commits)
if len(commits) == 1 {
c := commits[0]
- title = strings.TrimSpace(c.UserCommit.Summary())
+ title = strings.TrimSpace(c.Summary())
- body := strings.Split(strings.TrimSpace(c.UserCommit.Message()), "\n")
+ body := strings.Split(strings.TrimSpace(c.Message()), "\n")
if len(body) > 1 {
ctx.Data["content"] = strings.Join(body[1:], "\n")
}
@@ -952,9 +939,10 @@ func ExcerptBlob(ctx *context.Context) {
RightHunkSize: rightHunkSize,
},
}
- if direction == "up" {
+ switch direction {
+ case "up":
section.Lines = append([]*gitdiff.DiffLine{lineSection}, section.Lines...)
- } else if direction == "down" {
+ case "down":
section.Lines = append(section.Lines, lineSection)
}
}
@@ -966,7 +954,7 @@ func ExcerptBlob(ctx *context.Context) {
}
func getExcerptLines(commit *git.Commit, filePath string, idxLeft, idxRight, chunkSize int) ([]*gitdiff.DiffLine, error) {
- blob, err := commit.Tree.GetBlobByPath(filePath)
+ blob, err := commit.GetBlobByPath(filePath)
if err != nil {
return nil, err
}
diff --git a/routers/web/repo/editor_test.go b/routers/web/repo/editor_test.go
index 5b893cf258..b5d40abdab 100644
--- a/routers/web/repo/editor_test.go
+++ b/routers/web/repo/editor_test.go
@@ -37,7 +37,7 @@ func TestCleanUploadName(t *testing.T) {
"..a.dotty../.folder../.name...": "..a.dotty../.folder../.name...",
}
for k, v := range kases {
- assert.EqualValues(t, cleanUploadFileName(k), v)
+ assert.Equal(t, cleanUploadFileName(k), v)
}
}
diff --git a/routers/web/repo/githttp.go b/routers/web/repo/githttp.go
index 650b1d88f4..42302d0e02 100644
--- a/routers/web/repo/githttp.go
+++ b/routers/web/repo/githttp.go
@@ -183,9 +183,7 @@ func httpBase(ctx *context.Context) *serviceHandler {
if repoExist {
// Because of special ref "refs/for" .. , need delay write permission check
- if git.SupportProcReceive {
- accessMode = perm.AccessModeRead
- }
+ accessMode = perm.AccessModeRead
if ctx.Data["IsActionsToken"] == true {
taskID := ctx.Data["ActionsTaskID"].(int64)
diff --git a/routers/web/repo/githttp_test.go b/routers/web/repo/githttp_test.go
index 5ba8de3d63..0164b11f66 100644
--- a/routers/web/repo/githttp_test.go
+++ b/routers/web/repo/githttp_test.go
@@ -37,6 +37,6 @@ func TestContainsParentDirectorySeparator(t *testing.T) {
}
for i := range tests {
- assert.EqualValues(t, tests[i].b, containsParentDirectorySeparator(tests[i].v))
+ assert.Equal(t, tests[i].b, containsParentDirectorySeparator(tests[i].v))
}
}
diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go
index ff3a903aed..a4f6f97a05 100644
--- a/routers/web/repo/issue.go
+++ b/routers/web/repo/issue.go
@@ -187,9 +187,10 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption opt
// 0 means issues with no label
// blank means labels will not be filtered for issues
selectLabels := ctx.FormString("labels")
- if selectLabels == "" {
+ switch selectLabels {
+ case "":
ctx.Data["AllLabels"] = true
- } else if selectLabels == "0" {
+ case "0":
ctx.Data["NoLabel"] = true
}
if len(selectLabels) > 0 {
@@ -347,11 +348,6 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption opt
ctx.ServerError("GetIssuesAllCommitStatus", err)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- for key := range commitStatuses {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
- }
- }
if err := issues.LoadAttributes(ctx); err != nil {
ctx.ServerError("issues.LoadAttributes", err)
@@ -426,9 +422,10 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption opt
return 0
}
reviewTyp := issues_model.ReviewTypeApprove
- if typ == "reject" {
+ switch typ {
+ case "reject":
reviewTyp = issues_model.ReviewTypeReject
- } else if typ == "waiting" {
+ case "waiting":
reviewTyp = issues_model.ReviewTypeRequest
}
for _, count := range counts {
@@ -1311,7 +1308,7 @@ func roleDescriptor(ctx stdCtx.Context, repo *repo_model.Repository, poster *use
}
// Special user that can't have associated contributions and permissions in the repo.
- if poster.IsGhost() || poster.IsActions() || poster.IsAPActor() {
+ if poster.IsSystem() || poster.IsAPServerActor() {
return roleDescriptor, nil
}
@@ -1475,6 +1472,7 @@ func ViewIssue(ctx *context.Context) {
ctx.Data["IssueType"] = "all"
}
+ ctx.Data["IsModerationEnabled"] = setting.Moderation.Enabled
ctx.Data["IsProjectsEnabled"] = ctx.Repo.CanRead(unit.TypeProjects)
ctx.Data["IsAttachmentEnabled"] = setting.Attachment.Enabled
upload.AddUploadContext(ctx, "comment")
@@ -1695,7 +1693,7 @@ func ViewIssue(ctx *context.Context) {
return
}
ghostMilestone := &issues_model.Milestone{
- ID: -1,
+ ID: issues_model.GhostMilestoneID,
Name: ctx.Locale.TrString("repo.issues.deleted_milestone"),
}
if comment.OldMilestoneID > 0 && comment.OldMilestone == nil {
@@ -1796,15 +1794,6 @@ func ViewIssue(ctx *context.Context) {
ctx.ServerError("LoadPushCommits", err)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- for _, commit := range comment.Commits {
- if commit.Status == nil {
- continue
- }
- commit.Status.HideActionsURL(ctx)
- git_model.CommitStatusesHideActionsURL(ctx, commit.Statuses)
- }
- }
} else if comment.Type == issues_model.CommentTypeAddTimeManual ||
comment.Type == issues_model.CommentTypeStopTracking ||
comment.Type == issues_model.CommentTypeDeleteTimeManual {
@@ -2128,7 +2117,7 @@ func checkBlockedByIssues(ctx *context.Context, blockers []*issues_model.Depende
}
repoPerms[blocker.RepoID] = perm
}
- if perm.CanReadIssuesOrPulls(blocker.Issue.IsPull) {
+ if perm.CanReadIssuesOrPulls(blocker.IsPull) {
canRead = append(canRead, blocker)
} else {
notPermitted = append(notPermitted, blocker)
@@ -2786,7 +2775,7 @@ func SearchIssues(ctx *context.Context) {
IncludedAnyLabelIDs: includedAnyLabels,
MilestoneIDs: includedMilestones,
ProjectID: projectID,
- SortBy: issue_indexer.SortByCreatedDesc,
+ SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
}
if since != 0 {
@@ -2815,9 +2804,10 @@ func SearchIssues(ctx *context.Context) {
}
}
- // FIXME: It's unsupported to sort by priority repo when searching by indexer,
- // it's indeed an regression, but I think it is worth to support filtering by indexer first.
- _ = ctx.FormInt64("priority_repo_id")
+ priorityRepoID := ctx.FormInt64("priority_repo_id")
+ if priorityRepoID > 0 {
+ searchOpt.PriorityRepoID = optional.Some(priorityRepoID)
+ }
ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
if err != nil {
@@ -2955,7 +2945,7 @@ func ListIssues(ctx *context.Context) {
IsPull: isPull,
IsClosed: isClosed,
ProjectID: projectID,
- SortBy: issue_indexer.SortByCreatedDesc,
+ SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
}
if since != 0 {
searchOpt.UpdatedAfterUnix = optional.Some(since)
@@ -3117,7 +3107,7 @@ func NewComment(ctx *context.Context) {
// Check if issue admin/poster changes the status of issue.
if (ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) || (ctx.IsSigned && issue.IsPoster(ctx.Doer.ID))) &&
(form.Status == "reopen" || form.Status == "close") &&
- !(issue.IsPull && issue.PullRequest.HasMerged) {
+ (!issue.IsPull || !issue.PullRequest.HasMerged) {
// Duplication and conflict check should apply to reopen pull request.
var pr *issues_model.PullRequest
@@ -3253,11 +3243,7 @@ func NewComment(ctx *context.Context) {
comment, err := issue_service.CreateIssueComment(ctx, ctx.Doer, ctx.Repo.Repository, issue, form.Content, attachments)
if err != nil {
if errors.Is(err, user_model.ErrBlockedByUser) {
- if issue.IsPull {
- ctx.JSONError(ctx.Tr("repo.pulls.comment.blocked_by_user"))
- } else {
- ctx.JSONError(ctx.Tr("repo.issues.comment.blocked_by_user"))
- }
+ ctx.JSONError(ctx.Tr("repo.comment.blocked_by_user"))
} else {
ctx.ServerError("CreateIssueComment", err)
}
@@ -3607,9 +3593,9 @@ func GetIssueAttachments(ctx *context.Context) {
if ctx.Written() {
return
}
- attachments := make([]*api.Attachment, len(issue.Attachments))
+ attachments := make([]*api.WebAttachment, len(issue.Attachments))
for i := 0; i < len(issue.Attachments); i++ {
- attachments[i] = convert.ToAttachment(ctx.Repo.Repository, issue.Attachments[i])
+ attachments[i] = convert.ToWebAttachment(ctx.Repo.Repository, issue.Attachments[i])
}
ctx.JSON(http.StatusOK, attachments)
}
@@ -3632,7 +3618,7 @@ func GetCommentAttachments(ctx *context.Context) {
return
}
- if !ctx.Repo.Permission.CanReadIssuesOrPulls(comment.Issue.IsPull) {
+ if !ctx.Repo.CanReadIssuesOrPulls(comment.Issue.IsPull) {
ctx.NotFound("CanReadIssuesOrPulls", issues_model.ErrCommentNotExist{})
return
}
@@ -3642,13 +3628,13 @@ func GetCommentAttachments(ctx *context.Context) {
return
}
- attachments := make([]*api.Attachment, 0)
if err := comment.LoadAttachments(ctx); err != nil {
ctx.ServerError("LoadAttachments", err)
return
}
+ attachments := make([]*api.WebAttachment, len(comment.Attachments))
for i := 0; i < len(comment.Attachments); i++ {
- attachments = append(attachments, convert.ToAttachment(ctx.Repo.Repository, comment.Attachments[i]))
+ attachments[i] = convert.ToWebAttachment(ctx.Repo.Repository, comment.Attachments[i])
}
ctx.JSON(http.StatusOK, attachments)
}
diff --git a/routers/web/repo/issue_content_history.go b/routers/web/repo/issue_content_history.go
index 5c71d75f80..11d0de90de 100644
--- a/routers/web/repo/issue_content_history.go
+++ b/routers/web/repo/issue_content_history.go
@@ -160,15 +160,16 @@ func GetContentHistoryDetail(ctx *context.Context) {
diffHTMLBuf := bytes.Buffer{}
diffHTMLBuf.WriteString("<pre class='chroma'>")
for _, it := range diff {
- if it.Type == diffmatchpatch.DiffInsert {
+ switch it.Type {
+ case diffmatchpatch.DiffInsert:
diffHTMLBuf.WriteString("<span class='gi'>")
diffHTMLBuf.WriteString(html.EscapeString(it.Text))
diffHTMLBuf.WriteString("</span>")
- } else if it.Type == diffmatchpatch.DiffDelete {
+ case diffmatchpatch.DiffDelete:
diffHTMLBuf.WriteString("<span class='gd'>")
diffHTMLBuf.WriteString(html.EscapeString(it.Text))
diffHTMLBuf.WriteString("</span>")
- } else {
+ default:
diffHTMLBuf.WriteString(html.EscapeString(it.Text))
}
}
diff --git a/routers/web/repo/issue_label_test.go b/routers/web/repo/issue_label_test.go
index 406ab4918c..0adcc39499 100644
--- a/routers/web/repo/issue_label_test.go
+++ b/routers/web/repo/issue_label_test.go
@@ -39,7 +39,7 @@ func TestInitializeLabels(t *testing.T) {
contexttest.LoadRepo(t, ctx, 2)
web.SetForm(ctx, &forms.InitializeLabelsForm{TemplateName: "Default"})
InitializeLabels(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
RepoID: 2,
Name: "enhancement",
@@ -69,7 +69,7 @@ func TestRetrieveLabels(t *testing.T) {
assert.True(t, ok)
if assert.Len(t, labels, len(testCase.ExpectedLabelIDs)) {
for i, label := range labels {
- assert.EqualValues(t, testCase.ExpectedLabelIDs[i], label.ID)
+ assert.Equal(t, testCase.ExpectedLabelIDs[i], label.ID)
}
}
}
@@ -85,7 +85,7 @@ func TestNewLabel(t *testing.T) {
Color: "#abcdef",
})
NewLabel(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
Name: "newlabel",
Color: "#abcdef",
@@ -105,7 +105,7 @@ func TestUpdateLabel(t *testing.T) {
IsArchived: true,
})
UpdateLabel(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &issues_model.Label{
ID: 2,
Name: "newnameforlabel",
@@ -121,7 +121,7 @@ func TestDeleteLabel(t *testing.T) {
contexttest.LoadRepo(t, ctx, 1)
ctx.Req.Form.Set("id", "2")
DeleteLabel(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
unittest.AssertNotExistsBean(t, &issues_model.Label{ID: 2})
unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{LabelID: 2})
assert.EqualValues(t, ctx.Tr("repo.issues.label_deletion_success"), ctx.Flash.SuccessMsg)
@@ -135,7 +135,7 @@ func TestUpdateIssueLabel_Clear(t *testing.T) {
ctx.Req.Form.Set("issue_ids", "1,3")
ctx.Req.Form.Set("action", "clear")
UpdateIssueLabel(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 1})
unittest.AssertNotExistsBean(t, &issues_model.IssueLabel{IssueID: 3})
unittest.CheckConsistencyFor(t, &issues_model.Label{})
@@ -161,7 +161,7 @@ func TestUpdateIssueLabel_Toggle(t *testing.T) {
ctx.Req.Form.Set("action", testCase.Action)
ctx.Req.Form.Set("id", strconv.Itoa(int(testCase.LabelID)))
UpdateIssueLabel(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
for _, issueID := range testCase.IssueIDs {
unittest.AssertExistsIf(t, testCase.ExpectedAdd, &issues_model.IssueLabel{
IssueID: issueID,
diff --git a/routers/web/repo/projects.go b/routers/web/repo/projects.go
index 80f699787c..e5bd06e987 100644
--- a/routers/web/repo/projects.go
+++ b/routers/web/repo/projects.go
@@ -120,7 +120,7 @@ func Projects(ctx *context.Context) {
pager.AddParam(ctx, "state", "State")
ctx.Data["Page"] = pager
- ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CanWriteProjects"] = ctx.Repo.CanWrite(unit.TypeProjects)
ctx.Data["IsShowClosed"] = isShowClosed
ctx.Data["IsProjectsPage"] = true
ctx.Data["SortType"] = sortType
@@ -146,7 +146,7 @@ func RenderNewProject(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.projects.new")
ctx.Data["TemplateConfigs"] = project_model.GetTemplateConfigs()
ctx.Data["CardTypes"] = project_model.GetCardConfig()
- ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CanWriteProjects"] = ctx.Repo.CanWrite(unit.TypeProjects)
ctx.Data["CancelLink"] = ctx.Repo.Repository.Link() + "/projects"
ctx.HTML(http.StatusOK, tplProjectsNew)
}
@@ -228,7 +228,7 @@ func DeleteProject(ctx *context.Context) {
func RenderEditProject(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
ctx.Data["PageIsEditProjects"] = true
- ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CanWriteProjects"] = ctx.Repo.CanWrite(unit.TypeProjects)
ctx.Data["CardTypes"] = project_model.GetCardConfig()
p, err := project_model.GetProjectByID(ctx, ctx.ParamsInt64(":id"))
@@ -262,7 +262,7 @@ func EditProjectPost(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.projects.edit")
ctx.Data["PageIsEditProjects"] = true
- ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CanWriteProjects"] = ctx.Repo.CanWrite(unit.TypeProjects)
ctx.Data["CardTypes"] = project_model.GetCardConfig()
ctx.Data["CancelLink"] = project_model.ProjectLinkForRepo(ctx.Repo.Repository, projectID)
@@ -378,7 +378,7 @@ func ViewProject(ctx *context.Context) {
ctx.Data["Title"] = project.Title
ctx.Data["IsProjectsPage"] = true
- ctx.Data["CanWriteProjects"] = ctx.Repo.Permission.CanWrite(unit.TypeProjects)
+ ctx.Data["CanWriteProjects"] = ctx.Repo.CanWrite(unit.TypeProjects)
ctx.Data["Project"] = project
ctx.Data["IssuesMap"] = issuesMap
ctx.Data["Columns"] = columns
diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go
index 6ba1bca181..4e365f24ea 100644
--- a/routers/web/repo/pull.go
+++ b/routers/web/repo/pull.go
@@ -10,13 +10,16 @@ import (
"errors"
"fmt"
"html"
+ "html/template"
"net/http"
"net/url"
+ "path"
"strconv"
"strings"
"forgejo.org/models"
activities_model "forgejo.org/models/activities"
+ asymkey_model "forgejo.org/models/asymkey"
"forgejo.org/models/db"
git_model "forgejo.org/models/git"
issues_model "forgejo.org/models/issues"
@@ -28,11 +31,13 @@ import (
"forgejo.org/models/unit"
user_model "forgejo.org/models/user"
"forgejo.org/modules/base"
+ "forgejo.org/modules/charset"
"forgejo.org/modules/emoji"
"forgejo.org/modules/git"
"forgejo.org/modules/gitrepo"
issue_template "forgejo.org/modules/issue/template"
"forgejo.org/modules/log"
+ "forgejo.org/modules/markup"
"forgejo.org/modules/optional"
"forgejo.org/modules/setting"
"forgejo.org/modules/structs"
@@ -400,6 +405,7 @@ func setMergeTarget(ctx *context.Context, pull *issues_model.PullRequest) {
// GetPullDiffStats get Pull Requests diff stats
func GetPullDiffStats(ctx *context.Context) {
+ // FIXME: this getPullInfo call seems to duplicate the one made in other route handlers
issue, ok := getPullInfo(ctx)
if !ok {
return
@@ -407,15 +413,15 @@ func GetPullDiffStats(ctx *context.Context) {
pull := issue.PullRequest
mergeBaseCommitID := GetMergedBaseCommitID(ctx, issue)
-
if mergeBaseCommitID == "" {
ctx.NotFound("PullFiles", nil)
return
}
+ // Do not report a 500 server error to end users if an error occurs; otherwise a PR with a missing ref could not be viewed at all.
headCommitID, err := ctx.Repo.GitRepo.GetRefCommitID(pull.GetGitRefName())
if err != nil {
- ctx.ServerError("GetRefCommitID", err)
+ log.Error("Failed to GetRefCommitID: %v, repo: %v", err, ctx.Repo.Repository.FullName())
return
}
@@ -497,6 +503,7 @@ func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)
ctx.Data["IsPullRequestBroken"] = true
ctx.Data["BaseTarget"] = pull.BaseBranch
ctx.Data["NumCommits"] = 0
+ ctx.Data["CommitIDs"] = map[string]bool{}
ctx.Data["NumFiles"] = 0
return nil
}
@@ -507,6 +514,12 @@ func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)
ctx.Data["NumCommits"] = len(compareInfo.Commits)
ctx.Data["NumFiles"] = compareInfo.NumFiles
+ commitIDs := map[string]bool{}
+ for _, commit := range compareInfo.Commits {
+ commitIDs[commit.ID.String()] = true
+ }
+ ctx.Data["CommitIDs"] = commitIDs
+
if len(compareInfo.Commits) != 0 {
sha := compareInfo.Commits[0].ID.String()
commitStatuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, sha, db.ListOptionsAll)
@@ -514,9 +527,6 @@ func PrepareMergedViewPullInfo(ctx *context.Context, issue *issues_model.Issue)
ctx.ServerError("GetLatestCommitStatus", err)
return nil
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
- }
if len(commitStatuses) != 0 {
ctx.Data["LatestCommitStatuses"] = commitStatuses
@@ -580,9 +590,6 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.ServerError("GetLatestCommitStatus", err)
return nil
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
- }
if len(commitStatuses) > 0 {
ctx.Data["LatestCommitStatuses"] = commitStatuses
@@ -596,6 +603,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.Data["IsPullRequestBroken"] = true
ctx.Data["BaseTarget"] = pull.BaseBranch
ctx.Data["NumCommits"] = 0
+ ctx.Data["CommitIDs"] = map[string]bool{}
ctx.Data["NumFiles"] = 0
return nil
}
@@ -606,6 +614,13 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.Data["NumCommits"] = len(compareInfo.Commits)
ctx.Data["NumFiles"] = compareInfo.NumFiles
+
+ commitIDs := map[string]bool{}
+ for _, commit := range compareInfo.Commits {
+ commitIDs[commit.ID.String()] = true
+ }
+ ctx.Data["CommitIDs"] = commitIDs
+
return compareInfo
}
@@ -664,6 +679,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
}
ctx.Data["BaseTarget"] = pull.BaseBranch
ctx.Data["NumCommits"] = 0
+ ctx.Data["CommitIDs"] = map[string]bool{}
ctx.Data["NumFiles"] = 0
return nil
}
@@ -676,9 +692,6 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.ServerError("GetLatestCommitStatus", err)
return nil
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses)
- }
if len(commitStatuses) > 0 {
ctx.Data["LatestCommitStatuses"] = commitStatuses
@@ -744,6 +757,7 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.Data["IsPullRequestBroken"] = true
ctx.Data["BaseTarget"] = pull.BaseBranch
ctx.Data["NumCommits"] = 0
+ ctx.Data["CommitIDs"] = map[string]bool{}
ctx.Data["NumFiles"] = 0
return nil
}
@@ -768,6 +782,13 @@ func PrepareViewPullInfo(ctx *context.Context, issue *issues_model.Issue) *git.C
ctx.Data["NumCommits"] = len(compareInfo.Commits)
ctx.Data["NumFiles"] = compareInfo.NumFiles
+
+ commitIDs := map[string]bool{}
+ for _, commit := range compareInfo.Commits {
+ commitIDs[commit.ID.String()] = true
+ }
+ ctx.Data["CommitIDs"] = commitIDs
+
return compareInfo
}
@@ -846,7 +867,7 @@ func ViewPullCommits(ctx *context.Context) {
ctx.Data["Username"] = ctx.Repo.Owner.Name
ctx.Data["Reponame"] = ctx.Repo.Repository.Name
- commits := processGitCommits(ctx, prInfo.Commits)
+ commits := git_model.ParseCommitsWithStatus(ctx, prInfo.Commits, ctx.Repo.Repository)
ctx.Data["Commits"] = commits
ctx.Data["CommitCount"] = len(commits)
@@ -891,7 +912,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
foundStartCommit := len(specifiedStartCommit) == 0
foundEndCommit := len(specifiedEndCommit) == 0
- if !(foundStartCommit && foundEndCommit) {
+ if !foundStartCommit || !foundEndCommit {
for _, commit := range prInfo.Commits {
if commit.ID.String() == specifiedStartCommit {
foundStartCommit = true
@@ -906,7 +927,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
}
}
- if !(foundStartCommit && foundEndCommit) {
+ if !foundStartCommit || !foundEndCommit {
ctx.NotFound("Given SHA1 not found for this PR", nil)
return
}
@@ -927,7 +948,85 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
ctx.Data["IsShowingOnlySingleCommit"] = willShowSpecifiedCommit
- if willShowSpecifiedCommit || willShowSpecifiedCommitRange {
+ if willShowSpecifiedCommit {
+ commitID := specifiedEndCommit
+
+ ctx.Data["CommitID"] = commitID
+
+ var prevCommit, curCommit, nextCommit *git.Commit
+
+ // Iterate in reverse to properly map "previous" and "next" buttons
+ for i := len(prInfo.Commits) - 1; i >= 0; i-- {
+ commit := prInfo.Commits[i]
+
+ if curCommit != nil {
+ nextCommit = commit
+ break
+ }
+
+ if commit.ID.String() == commitID {
+ curCommit = commit
+ } else {
+ prevCommit = commit
+ }
+ }
+
+ if curCommit == nil {
+ ctx.ServerError("Repo.GitRepo.viewPullFiles", git.ErrNotExist{ID: commitID})
+ return
+ }
+
+ ctx.Data["Commit"] = curCommit
+ if prevCommit != nil {
+ ctx.Data["PrevCommitLink"] = path.Join(ctx.Repo.RepoLink, "pulls", strconv.FormatInt(issue.Index, 10), "commits", prevCommit.ID.String())
+ }
+ if nextCommit != nil {
+ ctx.Data["NextCommitLink"] = path.Join(ctx.Repo.RepoLink, "pulls", strconv.FormatInt(issue.Index, 10), "commits", nextCommit.ID.String())
+ }
+
+ statuses, _, err := git_model.GetLatestCommitStatus(ctx, ctx.Repo.Repository.ID, commitID, db.ListOptionsAll)
+ if err != nil {
+ log.Error("GetLatestCommitStatus: %v", err)
+ }
+
+ ctx.Data["CommitStatus"] = git_model.CalcCommitStatus(statuses)
+ ctx.Data["CommitStatuses"] = statuses
+
+ verification := asymkey_model.ParseCommitWithSignature(ctx, curCommit)
+ ctx.Data["Verification"] = verification
+ ctx.Data["Author"] = user_model.ValidateCommitWithEmail(ctx, curCommit)
+
+ if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+ return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+ }, nil); err != nil {
+ ctx.ServerError("CalculateTrustStatus", err)
+ return
+ }
+
+ note := &git.Note{}
+ err = git.GetNote(ctx, ctx.Repo.GitRepo, specifiedEndCommit, note)
+ if err == nil {
+ ctx.Data["NoteCommit"] = note.Commit
+ ctx.Data["NoteAuthor"] = user_model.ValidateCommitWithEmail(ctx, note.Commit)
+ ctx.Data["NoteRendered"], err = markup.RenderCommitMessage(&markup.RenderContext{
+ Links: markup.Links{
+ Base: ctx.Repo.RepoLink,
+ BranchPath: path.Join("commit", util.PathEscapeSegments(commitID)),
+ },
+ Metas: ctx.Repo.Repository.ComposeMetas(ctx),
+ GitRepo: ctx.Repo.GitRepo,
+ Ctx: ctx,
+ }, template.HTMLEscapeString(string(charset.ToUTF8WithFallback(note.Message, charset.ConvertOpts{}))))
+ if err != nil {
+ ctx.ServerError("RenderCommitMessage", err)
+ return
+ }
+ }
+
+ endCommitID = commitID
+ startCommitID = prInfo.MergeBase
+ ctx.Data["IsShowingAllCommits"] = false
+ } else if willShowSpecifiedCommitRange {
if len(specifiedEndCommit) > 0 {
endCommitID = specifiedEndCommit
} else {
@@ -938,6 +1037,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
} else {
startCommitID = prInfo.MergeBase
}
+
ctx.Data["IsShowingAllCommits"] = false
} else {
endCommitID = headCommitID
@@ -945,10 +1045,10 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
ctx.Data["IsShowingAllCommits"] = true
}
- ctx.Data["Username"] = ctx.Repo.Owner.Name
- ctx.Data["Reponame"] = ctx.Repo.Repository.Name
ctx.Data["AfterCommitID"] = endCommitID
ctx.Data["BeforeCommitID"] = startCommitID
+ ctx.Data["Username"] = ctx.Repo.Owner.Name
+ ctx.Data["Reponame"] = ctx.Repo.Repository.Name
fileOnly := ctx.FormBool("file-only")
@@ -980,7 +1080,7 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
// as the viewed information is designed to be loaded only on latest PR
// diff and if you're signed in.
if !ctx.IsSigned || willShowSpecifiedCommit || willShowSpecifiedCommitRange {
- diff, err = gitdiff.GetDiff(ctx, gitRepo, diffOptions, files...)
+ diff, err = gitdiff.GetDiffFull(ctx, gitRepo, diffOptions, files...)
methodWithError = "GetDiff"
} else {
diff, err = gitdiff.SyncAndGetUserSpecificDiff(ctx, ctx.Doer.ID, pull, gitRepo, diffOptions, files...)
@@ -1318,8 +1418,8 @@ func MergePullRequest(ctx *context.Context) {
} else if models.IsErrMergeConflicts(err) {
conflictError := err.(models.ErrMergeConflicts)
flashError, err := ctx.RenderToHTML(tplAlertDetails, map[string]any{
- "Message": ctx.Tr("repo.editor.merge_conflict"),
- "Summary": ctx.Tr("repo.editor.merge_conflict_summary"),
+ "Message": ctx.Tr("repo.pulls.merge_conflict"),
+ "Summary": ctx.Tr("repo.pulls.merge_conflict_summary"),
"Details": utils.SanitizeFlashErrorString(conflictError.StdErr) + "
" + utils.SanitizeFlashErrorString(conflictError.StdOut),
})
if err != nil {
diff --git a/routers/web/repo/pull_review.go b/routers/web/repo/pull_review.go
index 18a5b872f1..941e428039 100644
--- a/routers/web/repo/pull_review.go
+++ b/routers/web/repo/pull_review.go
@@ -211,9 +211,10 @@ func renderConversation(ctx *context.Context, comment *issues_model.Comment, ori
return
}
ctx.Data["AfterCommitID"] = pullHeadCommitID
- if origin == "diff" {
+ switch origin {
+ case "diff":
ctx.HTML(http.StatusOK, tplDiffConversation)
- } else if origin == "timeline" {
+ case "timeline":
ctx.HTML(http.StatusOK, tplTimelineConversation)
}
}
diff --git a/routers/web/repo/recent_commits.go b/routers/web/repo/recent_commits.go
index 6154de7377..211b1b2b12 100644
--- a/routers/web/repo/recent_commits.go
+++ b/routers/web/repo/recent_commits.go
@@ -4,12 +4,10 @@
package repo
import (
- "errors"
"net/http"
"forgejo.org/modules/base"
"forgejo.org/services/context"
- contributors_service "forgejo.org/services/repository"
)
const (
@@ -26,16 +24,3 @@ func RecentCommits(ctx *context.Context) {
ctx.HTML(http.StatusOK, tplRecentCommits)
}
-
-// RecentCommitsData returns JSON of recent commits data
-func RecentCommitsData(ctx *context.Context) {
- if contributorStats, err := contributors_service.GetContributorStats(ctx, ctx.Cache, ctx.Repo.Repository, ctx.Repo.CommitID); err != nil {
- if errors.Is(err, contributors_service.ErrAwaitGeneration) {
- ctx.Status(http.StatusAccepted)
- return
- }
- ctx.ServerError("RecentCommitsData", err)
- } else {
- ctx.JSON(http.StatusOK, contributorStats["total"].Weeks)
- }
-}
diff --git a/routers/web/repo/release.go b/routers/web/repo/release.go
index 024dd7b62d..a6de337192 100644
--- a/routers/web/repo/release.go
+++ b/routers/web/repo/release.go
@@ -1,5 +1,6 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2018 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
@@ -249,7 +250,7 @@ func addVerifyTagToContext(ctx *context.Context) {
if verification == nil {
return false
}
- return verification.Reason != "gpg.error.not_signed_commit"
+ return verification.Reason != asymkey.NotSigned
}
}
diff --git a/routers/web/repo/repo.go b/routers/web/repo/repo.go
index 53b3f34347..493787ad8b 100644
--- a/routers/web/repo/repo.go
+++ b/routers/web/repo/repo.go
@@ -693,9 +693,6 @@ func SearchRepo(ctx *context.Context) {
ctx.JSON(http.StatusInternalServerError, nil)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, latestCommitStatuses)
- }
results := make([]*repo_service.WebSearchRepository, len(repos))
for i, repo := range repos {
@@ -782,3 +779,27 @@ func PrepareBranchList(ctx *context.Context) {
}
ctx.Data["Branches"] = brs
}
+
+func SyncFork(ctx *context.Context) {
+ redirectURL := fmt.Sprintf("%s/src/branch/%s", ctx.Repo.RepoLink, util.PathEscapeSegments(ctx.Repo.BranchName))
+ branch := ctx.FormString("branch")
+
+ syncForkInfo, err := repo_service.GetSyncForkInfo(ctx, ctx.Repo.Repository, branch)
+ if err != nil {
+ ctx.ServerError("GetSyncForkInfo", err)
+ return
+ }
+
+ if !syncForkInfo.Allowed {
+ ctx.Redirect(redirectURL)
+ return
+ }
+
+ err = repo_service.SyncFork(ctx, ctx.Doer, ctx.Repo.Repository, branch)
+ if err != nil {
+ ctx.ServerError("SyncFork", err)
+ return
+ }
+
+ ctx.Redirect(redirectURL)
+}
diff --git a/routers/web/repo/setting/avatar.go b/routers/web/repo/setting/avatar.go
index abbb12cacb..84d7cccdb8 100644
--- a/routers/web/repo/setting/avatar.go
+++ b/routers/web/repo/setting/avatar.go
@@ -46,7 +46,7 @@ func UpdateAvatarSetting(ctx *context.Context, form forms.AvatarForm) error {
return fmt.Errorf("io.ReadAll: %w", err)
}
st := typesniffer.DetectContentType(data)
- if !(st.IsImage() && !st.IsSvgImage()) {
+ if !st.IsImage() || st.IsSvgImage() {
return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_not_a_image"))
}
if err = repo_service.UploadAvatar(ctx, ctxRepo, data); err != nil {
diff --git a/routers/web/repo/setting/lfs.go b/routers/web/repo/setting/lfs.go
index 2e9c34e8a7..b9cb86bd08 100644
--- a/routers/web/repo/setting/lfs.go
+++ b/routers/web/repo/setting/lfs.go
@@ -342,6 +342,20 @@ func LFSFileGet(ctx *context.Context) {
ctx.Data["IsVideoFile"] = true
case st.IsAudio():
ctx.Data["IsAudioFile"] = true
+ case st.Is3DModel():
+ ctx.Data["Is3DModelFile"] = true
+ switch {
+ case st.IsGLB():
+ ctx.Data["IsGLBFile"] = true
+ case st.IsSTL():
+ ctx.Data["IsSTLFile"] = true
+ case st.IsGLTF():
+ ctx.Data["IsGLTFFile"] = true
+ case st.IsOBJ():
+ ctx.Data["IsOBJFile"] = true
+ case st.Is3MF():
+ ctx.Data["Is3MFFile"] = true
+ }
case st.IsImage() && (setting.UI.SVG.Enabled || !st.IsSvgImage()):
ctx.Data["IsImageFile"] = true
}
diff --git a/routers/web/repo/setting/setting.go b/routers/web/repo/setting/setting.go
index 083cc4ae82..6f35e19880 100644
--- a/routers/web/repo/setting/setting.go
+++ b/routers/web/repo/setting/setting.go
@@ -14,7 +14,6 @@ import (
"time"
"forgejo.org/models"
- actions_model "forgejo.org/models/actions"
"forgejo.org/models/db"
"forgejo.org/models/organization"
quota_model "forgejo.org/models/quota"
@@ -24,6 +23,7 @@ import (
"forgejo.org/modules/base"
"forgejo.org/modules/git"
"forgejo.org/modules/indexer/code"
+ "forgejo.org/modules/indexer/issues"
"forgejo.org/modules/indexer/stats"
"forgejo.org/modules/lfs"
"forgejo.org/modules/log"
@@ -64,6 +64,9 @@ func SettingsCtxData(ctx *context.Context) {
ctx.Data["DisableNewPushMirrors"] = setting.Mirror.DisableNewPush
ctx.Data["DefaultMirrorInterval"] = setting.Mirror.DefaultInterval
ctx.Data["MinimumMirrorInterval"] = setting.Mirror.MinInterval
+ ctx.Data["MaxAvatarFileSize"] = setting.Avatar.MaxFileSize
+ ctx.Data["MaxAvatarWidth"] = setting.Avatar.MaxWidth
+ ctx.Data["MaxAvatarHeight"] = setting.Avatar.MaxHeight
signing, _ := asymkey_service.SigningKey(ctx, ctx.Repo.Repository.RepoPath())
ctx.Data["SigningKeyAvailable"] = len(signing) > 0
@@ -150,11 +153,9 @@ func UnitsPost(ctx *context.Context) {
})
deleteUnitTypes = append(deleteUnitTypes, unit_model.TypeWiki)
} else if form.EnableWiki && !form.EnableExternalWiki && !unit_model.TypeWiki.UnitGlobalDisabled() {
- var wikiPermissions repo_model.UnitAccessMode
+ wikiPermissions := repo_model.UnitAccessModeUnset
if form.GloballyWriteableWiki {
wikiPermissions = repo_model.UnitAccessModeWrite
- } else {
- wikiPermissions = repo_model.UnitAccessModeRead
}
units = append(units, repo_model.RepoUnit{
RepoID: repo.ID,
@@ -776,6 +777,8 @@ func SettingsPost(ctx *context.Context) {
return
}
code.UpdateRepoIndexer(ctx.Repo.Repository)
+ case "issues":
+ issues.UpdateRepoIndexer(ctx, ctx.Repo.Repository.ID)
default:
ctx.NotFound("", nil)
return
@@ -1034,7 +1037,7 @@ func SettingsPost(ctx *context.Context) {
return
}
- if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ if err := actions_service.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
log.Error("CleanRepoScheduleTasks for archived repo %s/%s: %v", ctx.Repo.Owner.Name, repo.Name, err)
}
diff --git a/routers/web/repo/setting/settings_test.go b/routers/web/repo/setting/settings_test.go
index 6f05953bfb..3a81b85e4c 100644
--- a/routers/web/repo/setting/settings_test.go
+++ b/routers/web/repo/setting/settings_test.go
@@ -15,6 +15,7 @@ import (
"forgejo.org/models/unittest"
user_model "forgejo.org/models/user"
"forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
"forgejo.org/modules/web"
"forgejo.org/services/context"
"forgejo.org/services/contexttest"
@@ -25,23 +26,8 @@ import (
"github.com/stretchr/testify/require"
)
-func createSSHAuthorizedKeysTmpPath(t *testing.T) func() {
- tmpDir := t.TempDir()
-
- oldPath := setting.SSH.RootPath
- setting.SSH.RootPath = tmpDir
-
- return func() {
- setting.SSH.RootPath = oldPath
- }
-}
-
func TestAddReadOnlyDeployKey(t *testing.T) {
- if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
- defer deferable()
- } else {
- return
- }
+ defer test.MockVariableValue(&setting.SSH.RootPath, t.TempDir())()
unittest.PrepareTestEnv(t)
ctx, _ := contexttest.MockContext(t, "user2/repo1/settings/keys")
@@ -55,7 +41,7 @@ func TestAddReadOnlyDeployKey(t *testing.T) {
}
web.SetForm(ctx, &addKeyForm)
DeployKeysPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
Name: addKeyForm.Title,
@@ -65,11 +51,7 @@ func TestAddReadOnlyDeployKey(t *testing.T) {
}
func TestAddReadWriteOnlyDeployKey(t *testing.T) {
- if deferable := createSSHAuthorizedKeysTmpPath(t); deferable != nil {
- defer deferable()
- } else {
- return
- }
+ defer test.MockVariableValue(&setting.SSH.RootPath, t.TempDir())()
unittest.PrepareTestEnv(t)
@@ -85,7 +67,7 @@ func TestAddReadWriteOnlyDeployKey(t *testing.T) {
}
web.SetForm(ctx, &addKeyForm)
DeployKeysPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
unittest.AssertExistsAndLoadBean(t, &asymkey_model.DeployKey{
Name: addKeyForm.Title,
@@ -124,7 +106,7 @@ func TestCollaborationPost(t *testing.T) {
CollaborationPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
require.NoError(t, err)
@@ -150,7 +132,7 @@ func TestCollaborationPost_InactiveUser(t *testing.T) {
CollaborationPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
@@ -184,7 +166,7 @@ func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) {
CollaborationPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
exists, err := repo_model.IsCollaborator(ctx, re.ID, 4)
require.NoError(t, err)
@@ -193,7 +175,7 @@ func TestCollaborationPost_AddCollaboratorTwice(t *testing.T) {
// Try adding the same collaborator again
CollaborationPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
@@ -215,7 +197,7 @@ func TestCollaborationPost_NonExistentUser(t *testing.T) {
CollaborationPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
@@ -255,7 +237,7 @@ func TestAddTeamPost(t *testing.T) {
AddTeamPost(ctx)
assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.Empty(t, ctx.Flash.ErrorMsg)
}
@@ -295,7 +277,7 @@ func TestAddTeamPost_NotAllowed(t *testing.T) {
AddTeamPost(ctx)
assert.False(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
@@ -336,7 +318,7 @@ func TestAddTeamPost_AddTeamTwice(t *testing.T) {
AddTeamPost(ctx)
assert.True(t, repo_service.HasRepository(db.DefaultContext, team, re.ID))
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
@@ -369,7 +351,7 @@ func TestAddTeamPost_NonExistentTeam(t *testing.T) {
ctx.Repo = repo
AddTeamPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assert.NotEmpty(t, ctx.Flash.ErrorMsg)
}
diff --git a/routers/web/repo/setting/webhook.go b/routers/web/repo/setting/webhook.go
index 6d4d9e47e2..0caa196e25 100644
--- a/routers/web/repo/setting/webhook.go
+++ b/routers/web/repo/setting/webhook.go
@@ -175,6 +175,9 @@ func ParseHookEvent(form forms.WebhookCoreForm) *webhook_module.HookEvent {
Wiki: form.Wiki,
Repository: form.Repository,
Package: form.Package,
+ ActionRunFailure: form.ActionFailure,
+ ActionRunRecover: form.ActionRecover,
+ ActionRunSuccess: form.ActionSuccess,
},
BranchFilter: form.BranchFilter,
}
diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go
index bea002f690..bb3e1388a8 100644
--- a/routers/web/repo/view.go
+++ b/routers/web/repo/view.go
@@ -1,5 +1,6 @@
-// Copyright 2017 The Gitea Authors. All rights reserved.
// Copyright 2014 The Gogs Authors. All rights reserved.
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repo
@@ -52,6 +53,7 @@ import (
"forgejo.org/routers/web/feed"
"forgejo.org/services/context"
issue_service "forgejo.org/services/issue"
+ repo_service "forgejo.org/services/repository"
files_service "forgejo.org/services/repository/files"
"github.com/nektos/act/pkg/model"
@@ -367,9 +369,6 @@ func loadLatestCommitData(ctx *context.Context, latestCommit *git.Commit) bool {
if err != nil {
log.Error("GetLatestCommitStatus: %v", err)
}
- if !ctx.Repo.CanRead(unit_model.TypeActions) {
- git_model.CommitStatusesHideActionsURL(ctx, statuses)
- }
ctx.Data["LatestCommitStatus"] = git_model.CalcCommitStatus(statuses)
ctx.Data["LatestCommitStatuses"] = statuses
@@ -440,8 +439,8 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry) {
ctx.Data["FileError"] = ctx.Locale.Tr("actions.runs.invalid_workflow_helper", workFlowErr.Error())
}
} else if slices.Contains([]string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}, ctx.Repo.TreePath) {
- if data, err := blob.GetBlobContent(setting.UI.MaxDisplayFileSize); err == nil {
- _, warnings := issue_model.GetCodeOwnersFromContent(ctx, data)
+ if rc, size, err := blob.NewTruncatedReader(setting.UI.MaxDisplayFileSize); err == nil {
+ _, warnings := issue_model.GetCodeOwnersFromReader(ctx, rc, size > setting.UI.MaxDisplayFileSize)
if len(warnings) > 0 {
ctx.Data["FileWarning"] = strings.Join(warnings, "\n")
}
@@ -625,6 +624,20 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry) {
ctx.Data["IsVideoFile"] = true
case fInfo.st.IsAudio():
ctx.Data["IsAudioFile"] = true
+ case fInfo.st.Is3DModel():
+ ctx.Data["Is3DModelFile"] = true
+ switch {
+ case fInfo.st.IsGLB():
+ ctx.Data["IsGLBFile"] = true
+ case fInfo.st.IsSTL():
+ ctx.Data["IsSTLFile"] = true
+ case fInfo.st.IsGLTF():
+ ctx.Data["IsGLTFFile"] = true
+ case fInfo.st.IsOBJ():
+ ctx.Data["IsOBJFile"] = true
+ case fInfo.st.Is3MF():
+ ctx.Data["Is3MFFile"] = true
+ }
case fInfo.st.IsImage() && (setting.UI.SVG.Enabled || !fInfo.st.IsSvgImage()):
ctx.Data["IsImageFile"] = true
ctx.Data["CanCopyContent"] = true
@@ -1154,6 +1167,20 @@ PostRecentBranchCheck:
}
}
+ if ctx.Repo.Repository.IsFork && ctx.Repo.IsViewBranch && len(ctx.Repo.TreePath) == 0 && ctx.Repo.CanWriteToBranch(ctx, ctx.Doer, ctx.Repo.BranchName) {
+ syncForkInfo, err := repo_service.GetSyncForkInfo(ctx, ctx.Repo.Repository, ctx.Repo.BranchName)
+ if err != nil {
+ ctx.ServerError("CanSync", err)
+ return
+ }
+
+ if syncForkInfo.Allowed {
+ ctx.Data["CanSyncFork"] = true
+ ctx.Data["ForkCommitsBehind"] = syncForkInfo.CommitsBehind
+ ctx.Data["BaseBranchLink"] = fmt.Sprintf("%s/src/branch/%s", ctx.Repo.Repository.BaseRepo.HTMLURL(), util.PathEscapeSegments(ctx.Repo.BranchName))
+ }
+ }
+
ctx.Data["Paths"] = paths
branchLink := ctx.Repo.RepoLink + "/src/" + ctx.Repo.BranchNameSubURL()
@@ -1225,6 +1252,7 @@ func RenderUserCards(ctx *context.Context, total int, getter func(opts db.ListOp
func Watchers(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.watchers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.watchers")
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("watch.list.none")
ctx.Data["PageIsWatchers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumWatches, func(opts db.ListOptions) ([]*user_model.User, error) {
@@ -1236,6 +1264,7 @@ func Watchers(ctx *context.Context) {
func Stars(ctx *context.Context) {
ctx.Data["Title"] = ctx.Tr("repo.stargazers")
ctx.Data["CardsTitle"] = ctx.Tr("repo.stargazers")
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("stars.list.none")
ctx.Data["PageIsStargazers"] = true
RenderUserCards(ctx, ctx.Repo.Repository.NumStars, func(opts db.ListOptions) ([]*user_model.User, error) {
return repo_model.GetStargazers(ctx, ctx.Repo.Repository, opts)
diff --git a/routers/web/repo/wiki.go b/routers/web/repo/wiki.go
index 9a21ac21a3..1b5265978a 100644
--- a/routers/web/repo/wiki.go
+++ b/routers/web/repo/wiki.go
@@ -393,7 +393,7 @@ func renderRevisionPage(ctx *context.Context) (*git.Repository, *git.TreeEntry)
ctx.ServerError("CommitsByFileAndRange", err)
return nil, nil
}
- ctx.Data["Commits"] = git_model.ConvertFromGitCommit(ctx, commitsHistory, ctx.Repo.Repository)
+ ctx.Data["Commits"] = git_model.ParseCommitsWithStatus(ctx, commitsHistory, ctx.Repo.Repository)
pager := context.NewPagination(int(commitsCount), setting.Git.CommitsRangeSize, page, 5)
pager.SetDefaultParams(ctx)
diff --git a/routers/web/repo/wiki_test.go b/routers/web/repo/wiki_test.go
index cba416fc92..5709b32257 100644
--- a/routers/web/repo/wiki_test.go
+++ b/routers/web/repo/wiki_test.go
@@ -73,7 +73,7 @@ func assertPagesMetas(t *testing.T, expectedNames []string, metas any) {
return
}
for i, pageMeta := range pageMetas {
- assert.EqualValues(t, expectedNames[i], pageMeta.Name)
+ assert.Equal(t, expectedNames[i], pageMeta.Name)
}
}
@@ -84,7 +84,7 @@ func TestWiki(t *testing.T) {
ctx.SetParams("*", "Home")
contexttest.LoadRepo(t, ctx, 1)
Wiki(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, "Home", ctx.Data["Title"])
assertPagesMetas(t, []string{"Home", "Long Page", "Page With Image", "Page With Spaced Name", "Unescaped File", "XSS"}, ctx.Data["Pages"])
}
@@ -95,7 +95,7 @@ func TestWikiPages(t *testing.T) {
ctx, _ := contexttest.MockContext(t, "user2/repo1/wiki/?action=_pages")
contexttest.LoadRepo(t, ctx, 1)
WikiPages(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assertPagesMetas(t, []string{"Home", "Long Page", "Page With Image", "Page With Spaced Name", "Unescaped File", "XSS"}, ctx.Data["Pages"])
}
@@ -106,7 +106,7 @@ func TestNewWiki(t *testing.T) {
contexttest.LoadUser(t, ctx, 2)
contexttest.LoadRepo(t, ctx, 1)
NewWiki(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, ctx.Tr("repo.wiki.new_page"), ctx.Data["Title"])
}
@@ -126,7 +126,7 @@ func TestNewWikiPost(t *testing.T) {
Message: message,
})
NewWikiPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
}
@@ -144,7 +144,7 @@ func TestNewWikiPost_ReservedName(t *testing.T) {
Message: message,
})
NewWikiPost(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, ctx.Tr("repo.wiki.reserved_page"), ctx.Flash.ErrorMsg)
assertWikiNotExists(t, ctx.Repo.Repository, "_edit")
}
@@ -157,7 +157,7 @@ func TestEditWiki(t *testing.T) {
contexttest.LoadUser(t, ctx, 2)
contexttest.LoadRepo(t, ctx, 1)
EditWiki(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, "Home", ctx.Data["Title"])
assert.Equal(t, wikiContent(t, ctx.Repo.Repository, "Home"), ctx.Data["content"])
}
@@ -178,7 +178,7 @@ func TestEditWikiPost(t *testing.T) {
Message: message,
})
EditWikiPost(ctx)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
assertWikiExists(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title))
assert.Equal(t, content, wikiContent(t, ctx.Repo.Repository, wiki_service.UserTitleToWebPath("", title)))
if title != "Home" {
@@ -194,7 +194,7 @@ func TestDeleteWikiPagePost(t *testing.T) {
contexttest.LoadUser(t, ctx, 2)
contexttest.LoadRepo(t, ctx, 1)
DeleteWikiPagePost(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assertWikiNotExists(t, ctx.Repo.Repository, "Home")
}
@@ -215,10 +215,10 @@ func TestWikiRaw(t *testing.T) {
contexttest.LoadRepo(t, ctx, 1)
WikiRaw(ctx)
if filetype == "" {
- assert.EqualValues(t, http.StatusNotFound, ctx.Resp.Status(), "filepath: %s", filepath)
+ assert.Equal(t, http.StatusNotFound, ctx.Resp.Status(), "filepath: %s", filepath)
} else {
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status(), "filepath: %s", filepath)
- assert.EqualValues(t, filetype, ctx.Resp.Header().Get("Content-Type"), "filepath: %s", filepath)
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status(), "filepath: %s", filepath)
+ assert.Equal(t, filetype, ctx.Resp.Header().Get("Content-Type"), "filepath: %s", filepath)
}
}
}
diff --git a/routers/web/shared/user/header.go b/routers/web/shared/user/header.go
index 56f0de2033..379e23cce4 100644
--- a/routers/web/shared/user/header.go
+++ b/routers/web/shared/user/header.go
@@ -38,6 +38,7 @@ func prepareContextForCommonProfile(ctx *context.Context) {
func PrepareContextForProfileBigAvatar(ctx *context.Context) {
prepareContextForCommonProfile(ctx)
+ ctx.Data["IsModerationEnabled"] = setting.Moderation.Enabled
ctx.Data["IsBlocked"] = ctx.Doer != nil && user_model.IsBlocked(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
ctx.Data["IsFollowing"] = ctx.Doer != nil && user_model.IsFollowing(ctx, ctx.Doer.ID, ctx.ContextUser.ID)
ctx.Data["ShowUserEmail"] = setting.UI.ShowUserEmail && ctx.ContextUser.Email != "" && ctx.IsSigned && !ctx.ContextUser.KeepEmailPrivate
diff --git a/routers/web/user/home.go b/routers/web/user/home.go
index 9f22cebaba..d980fa393a 100644
--- a/routers/web/user/home.go
+++ b/routers/web/user/home.go
@@ -16,7 +16,6 @@ import (
activities_model "forgejo.org/models/activities"
asymkey_model "forgejo.org/models/asymkey"
"forgejo.org/models/db"
- git_model "forgejo.org/models/git"
issues_model "forgejo.org/models/issues"
"forgejo.org/models/organization"
repo_model "forgejo.org/models/repo"
@@ -611,11 +610,6 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) {
ctx.ServerError("GetIssuesLastCommitStatus", err)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- for key := range commitStatuses {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
- }
- }
// -------------------------------
// Fill stats to post to ctx.Data.
@@ -655,9 +649,10 @@ func buildIssueOverview(ctx *context.Context, unitType unit.Type) {
return 0
}
reviewTyp := issues_model.ReviewTypeApprove
- if typ == "reject" {
+ switch typ {
+ case "reject":
reviewTyp = issues_model.ReviewTypeReject
- } else if typ == "waiting" {
+ case "waiting":
reviewTyp = issues_model.ReviewTypeRequest
}
for _, count := range counts {
diff --git a/routers/web/user/home_test.go b/routers/web/user/home_test.go
index af9d50538d..f3a2f12ae6 100644
--- a/routers/web/user/home_test.go
+++ b/routers/web/user/home_test.go
@@ -40,15 +40,15 @@ func TestArchivedIssues(t *testing.T) {
NumIssues[repo.ID] = repo.NumIssues
}
assert.False(t, IsArchived[50])
- assert.EqualValues(t, 1, NumIssues[50])
+ assert.Equal(t, 1, NumIssues[50])
assert.True(t, IsArchived[51])
- assert.EqualValues(t, 1, NumIssues[51])
+ assert.Equal(t, 1, NumIssues[51])
// Act
Issues(ctx)
// Assert: One Issue (ID 30) from one Repo (ID 50) is retrieved, while nothing from archived Repo 51 is retrieved
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.Len(t, ctx.Data["Issues"], 1)
}
@@ -61,7 +61,7 @@ func TestIssues(t *testing.T) {
contexttest.LoadUser(t, ctx, 2)
ctx.Req.Form.Set("state", "closed")
Issues(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
assert.Len(t, ctx.Data["Issues"], 1)
@@ -76,7 +76,7 @@ func TestPulls(t *testing.T) {
ctx.Req.Form.Set("state", "open")
ctx.Req.Form.Set("type", "your_repositories")
Pulls(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.Len(t, ctx.Data["Issues"], 5)
}
@@ -91,15 +91,15 @@ func TestMilestones(t *testing.T) {
ctx.Req.Form.Set("state", "closed")
ctx.Req.Form.Set("sort", "furthestduedate")
Milestones(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, map[int64]int64{1: 1}, ctx.Data["Counts"])
assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
assert.EqualValues(t, "furthestduedate", ctx.Data["SortType"])
assert.EqualValues(t, 1, ctx.Data["Total"])
assert.Len(t, ctx.Data["Milestones"], 1)
assert.Len(t, ctx.Data["Repos"], 2) // both repo 42 and 1 have milestones and both are owned by user 2
- assert.EqualValues(t, "user2/glob", ctx.Data["Repos"].(repo_model.RepositoryList)[0].FullName())
- assert.EqualValues(t, "user2/repo1", ctx.Data["Repos"].(repo_model.RepositoryList)[1].FullName())
+ assert.Equal(t, "user2/glob", ctx.Data["Repos"].(repo_model.RepositoryList)[0].FullName())
+ assert.Equal(t, "user2/repo1", ctx.Data["Repos"].(repo_model.RepositoryList)[1].FullName())
}
func TestMilestonesForSpecificRepo(t *testing.T) {
@@ -113,7 +113,7 @@ func TestMilestonesForSpecificRepo(t *testing.T) {
ctx.Req.Form.Set("state", "closed")
ctx.Req.Form.Set("sort", "furthestduedate")
Milestones(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.EqualValues(t, map[int64]int64{1: 1}, ctx.Data["Counts"])
assert.EqualValues(t, true, ctx.Data["IsShowClosed"])
assert.EqualValues(t, "furthestduedate", ctx.Data["SortType"])
@@ -144,7 +144,7 @@ func TestOrgLabels(t *testing.T) {
contexttest.LoadUser(t, ctx, 2)
contexttest.LoadOrganization(t, ctx, 3)
Issues(ctx)
- assert.EqualValues(t, http.StatusOK, ctx.Resp.Status())
+ assert.Equal(t, http.StatusOK, ctx.Resp.Status())
assert.True(t, ctx.Data["PageIsOrgIssues"].(bool))
@@ -163,9 +163,9 @@ func TestOrgLabels(t *testing.T) {
if assert.Len(t, labels, len(orgLabels)) {
for i, label := range labels {
- assert.EqualValues(t, orgLabels[i].OrgID, label.OrgID)
- assert.EqualValues(t, orgLabels[i].ID, label.ID)
- assert.EqualValues(t, orgLabels[i].Name, label.Name)
+ assert.Equal(t, orgLabels[i].OrgID, label.OrgID)
+ assert.Equal(t, orgLabels[i].ID, label.ID)
+ assert.Equal(t, orgLabels[i].Name, label.Name)
}
}
}
diff --git a/routers/web/user/notification.go b/routers/web/user/notification.go
index 296951b2ff..fdca1a2fdd 100644
--- a/routers/web/user/notification.go
+++ b/routers/web/user/notification.go
@@ -13,10 +13,8 @@ import (
activities_model "forgejo.org/models/activities"
"forgejo.org/models/db"
- git_model "forgejo.org/models/git"
issues_model "forgejo.org/models/issues"
repo_model "forgejo.org/models/repo"
- "forgejo.org/models/unit"
"forgejo.org/modules/base"
"forgejo.org/modules/log"
"forgejo.org/modules/optional"
@@ -311,11 +309,6 @@ func NotificationSubscriptions(ctx *context.Context) {
ctx.ServerError("GetIssuesAllCommitStatus", err)
return
}
- if !ctx.Repo.CanRead(unit.TypeActions) {
- for key := range commitStatuses {
- git_model.CommitStatusesHideActionsURL(ctx, commitStatuses[key])
- }
- }
ctx.Data["CommitLastStatus"] = lastStatus
ctx.Data["CommitStatuses"] = commitStatuses
ctx.Data["Issues"] = issues
@@ -340,9 +333,10 @@ func NotificationSubscriptions(ctx *context.Context) {
return 0
}
reviewTyp := issues_model.ReviewTypeApprove
- if typ == "reject" {
+ switch typ {
+ case "reject":
reviewTyp = issues_model.ReviewTypeReject
- } else if typ == "waiting" {
+ case "waiting":
reviewTyp = issues_model.ReviewTypeRequest
}
for _, count := range counts {
diff --git a/routers/web/user/profile.go b/routers/web/user/profile.go
index 5132b1da5c..78dd6c5e7c 100644
--- a/routers/web/user/profile.go
+++ b/routers/web/user/profile.go
@@ -1,5 +1,6 @@
// Copyright 2015 The Gogs Authors. All rights reserved.
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package user
@@ -69,17 +70,6 @@ func userProfile(ctx *context.Context) {
ctx.Data["OpenGraphURL"] = ctx.ContextUser.HTMLURL()
ctx.Data["OpenGraphDescription"] = ctx.ContextUser.Description
- // prepare heatmap data
- if setting.Service.EnableUserHeatmap {
- data, err := activities_model.GetUserHeatmapDataByUser(ctx, ctx.ContextUser, ctx.Doer)
- if err != nil {
- ctx.ServerError("GetUserHeatmapDataByUser", err)
- return
- }
- ctx.Data["HeatmapData"] = data
- ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
- }
-
profileDbRepo, profileGitRepo, profileReadmeBlob, profileClose := shared_user.FindUserProfileReadme(ctx, ctx.Doer)
defer profileClose()
@@ -170,11 +160,32 @@ func prepareUserProfileTabData(ctx *context.Context, showPrivate bool, profileDb
ctx.Data["Cards"] = followers
total = int(numFollowers)
ctx.Data["CardsTitle"] = ctx.TrN(total, "user.followers.title.one", "user.followers.title.few")
+ if ctx.IsSigned && ctx.ContextUser.ID == ctx.Doer.ID {
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("followers.incoming.list.self.none")
+ } else {
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("followers.incoming.list.none")
+ }
case "following":
ctx.Data["Cards"] = following
total = int(numFollowing)
ctx.Data["CardsTitle"] = ctx.TrN(total, "user.following.title.one", "user.following.title.few")
+ if ctx.IsSigned && ctx.ContextUser.ID == ctx.Doer.ID {
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("followers.outgoing.list.self.none")
+ } else {
+ ctx.Data["CardsNoneMsg"] = ctx.Tr("followers.outgoing.list.none", ctx.ContextUser.Name)
+ }
case "activity":
+ // prepare heatmap data
+ if setting.Service.EnableUserHeatmap {
+ data, err := activities_model.GetUserHeatmapDataByUser(ctx, ctx.ContextUser, ctx.Doer)
+ if err != nil {
+ ctx.ServerError("GetUserHeatmapDataByUser", err)
+ return
+ }
+ ctx.Data["HeatmapData"] = data
+ ctx.Data["HeatmapTotalContributions"] = activities_model.GetTotalContributionsInHeatmap(data)
+ }
+
date := ctx.FormString("date")
pagingNum = setting.UI.FeedPagingNum
items, count, err := activities_model.GetFeeds(ctx, activities_model.GetFeedsOptions{
@@ -253,10 +264,12 @@ func prepareUserProfileTabData(ctx *context.Context, showPrivate bool, profileDb
total = int(count)
case "overview":
- if bytes, err := profileReadme.GetBlobContent(setting.UI.MaxDisplayFileSize); err != nil {
- log.Error("failed to GetBlobContent: %v", err)
+ if rc, _, err := profileReadme.NewTruncatedReader(setting.UI.MaxDisplayFileSize); err != nil {
+ log.Error("failed to NewTruncatedReader: %v", err)
} else {
- if profileContent, err := markdown.RenderString(&markup.RenderContext{
+ defer rc.Close()
+
+ if profileContent, err := markdown.RenderReader(&markup.RenderContext{
Ctx: ctx,
GitRepo: profileGitRepo,
Links: markup.Links{
@@ -269,7 +282,7 @@ func prepareUserProfileTabData(ctx *context.Context, showPrivate bool, profileDb
BranchPath: path.Join("branch", util.PathEscapeSegments(profileDbRepo.DefaultBranch)),
},
Metas: map[string]string{"mode": "document"},
- }, bytes); err != nil {
+ }, rc); err != nil {
log.Error("failed to RenderString: %v", err)
} else {
ctx.Data["ProfileReadme"] = profileContent
diff --git a/routers/web/user/setting/account.go b/routers/web/user/setting/account.go
index a0cdb25f44..1dfcc90e35 100644
--- a/routers/web/user/setting/account.go
+++ b/routers/web/user/setting/account.go
@@ -57,7 +57,7 @@ func AccountPost(ctx *context.Context) {
return
}
- if ctx.Doer.IsPasswordSet() && !ctx.Doer.ValidatePassword(form.OldPassword) {
+ if ctx.Doer.IsPasswordSet() && !ctx.Doer.ValidatePassword(ctx, form.OldPassword) {
ctx.Flash.Error(ctx.Tr("settings.password_incorrect"))
} else if form.Password != form.Retype {
ctx.Flash.Error(ctx.Tr("form.password_not_match"))
@@ -178,10 +178,10 @@ func EmailPost(ctx *context.Context) {
// Set Email Notification Preference
if ctx.FormString("_method") == "NOTIFICATION" {
preference := ctx.FormString("preference")
- if !(preference == user_model.EmailNotificationsEnabled ||
- preference == user_model.EmailNotificationsOnMention ||
- preference == user_model.EmailNotificationsDisabled ||
- preference == user_model.EmailNotificationsAndYourOwn) {
+ if preference != user_model.EmailNotificationsEnabled &&
+ preference != user_model.EmailNotificationsOnMention &&
+ preference != user_model.EmailNotificationsDisabled &&
+ preference != user_model.EmailNotificationsAndYourOwn {
log.Error("Email notifications preference change returned unrecognized option %s: %s", preference, ctx.Doer.Name)
ctx.ServerError("SetEmailPreference", errors.New("option unrecognized"))
return
@@ -212,7 +212,7 @@ func EmailPost(ctx *context.Context) {
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.email_been_used"), tplSettingsAccount, &form)
- } else if validation.IsErrEmailCharIsNotSupported(err) || validation.IsErrEmailInvalid(err) {
+ } else if validation.IsErrEmailInvalid(err) {
loadAccountData(ctx)
ctx.RenderWithErr(ctx.Tr("form.email_invalid"), tplSettingsAccount, &form)
diff --git a/routers/web/user/setting/account_test.go b/routers/web/user/setting/account_test.go
index 82e00bbf7c..3f7e1c13bc 100644
--- a/routers/web/user/setting/account_test.go
+++ b/routers/web/user/setting/account_test.go
@@ -95,7 +95,7 @@ func TestChangePassword(t *testing.T) {
AccountPost(ctx)
assert.Contains(t, ctx.Flash.ErrorMsg, req.Message)
- assert.EqualValues(t, http.StatusSeeOther, ctx.Resp.Status())
+ assert.Equal(t, http.StatusSeeOther, ctx.Resp.Status())
})
}
}
diff --git a/routers/web/user/setting/adopt.go b/routers/web/user/setting/adopt.go
index f7fd1c3803..59ff31162b 100644
--- a/routers/web/user/setting/adopt.go
+++ b/routers/web/user/setting/adopt.go
@@ -16,12 +16,8 @@ import (
// AdoptOrDeleteRepository adopts or deletes a repository
func AdoptOrDeleteRepository(ctx *context.Context) {
- ctx.Data["Title"] = ctx.Tr("settings.adopt")
- ctx.Data["PageIsSettingsRepos"] = true
allowAdopt := ctx.IsUserSiteAdmin() || setting.Repository.AllowAdoptionOfUnadoptedRepositories
- ctx.Data["allowAdopt"] = allowAdopt
allowDelete := ctx.IsUserSiteAdmin() || setting.Repository.AllowDeleteOfUnadoptedRepositories
- ctx.Data["allowDelete"] = allowDelete
dir := ctx.FormString("id")
action := ctx.FormString("action")
diff --git a/routers/web/user/setting/applications.go b/routers/web/user/setting/applications.go
index 631d5958ea..e73239b79b 100644
--- a/routers/web/user/setting/applications.go
+++ b/routers/web/user/setting/applications.go
@@ -49,6 +49,9 @@ func ApplicationsPost(ctx *context.Context) {
ctx.ServerError("GetScope", err)
return
}
+ if !scope.HasPermissionScope() {
+ ctx.Flash.Error(ctx.Tr("settings.at_least_one_permission"), true)
+ }
t := &auth_model.AccessToken{
UID: ctx.Doer.ID,
Name: form.Name,
diff --git a/routers/web/user/setting/keys.go b/routers/web/user/setting/keys.go
index 94d32b730f..935efd7ba7 100644
--- a/routers/web/user/setting/keys.go
+++ b/routers/web/user/setting/keys.go
@@ -5,7 +5,7 @@
package setting
import (
- "fmt"
+ "errors"
"net/http"
asymkey_model "forgejo.org/models/asymkey"
@@ -80,7 +80,7 @@ func KeysPost(ctx *context.Context) {
ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
case "gpg":
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("gpg keys setting is not allowed to be visited"))
return
}
@@ -161,7 +161,7 @@ func KeysPost(ctx *context.Context) {
ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
case "ssh":
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("ssh keys setting is not allowed to be visited"))
return
}
@@ -205,7 +205,7 @@ func KeysPost(ctx *context.Context) {
ctx.Redirect(setting.AppSubURL + "/user/settings/keys")
case "verify_ssh":
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("ssh keys setting is not allowed to be visited"))
return
}
@@ -242,7 +242,7 @@ func DeleteKey(ctx *context.Context) {
switch ctx.FormString("type") {
case "gpg":
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageGPGKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("gpg keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("gpg keys setting is not allowed to be visited"))
return
}
if err := asymkey_model.DeleteGPGKey(ctx, ctx.Doer, ctx.FormInt64("id")); err != nil {
@@ -252,7 +252,7 @@ func DeleteKey(ctx *context.Context) {
}
case "ssh":
if user_model.IsFeatureDisabledWithLoginType(ctx.Doer, setting.UserFeatureManageSSHKeys) {
- ctx.NotFound("Not Found", fmt.Errorf("ssh keys setting is not allowed to be visited"))
+ ctx.NotFound("Not Found", errors.New("ssh keys setting is not allowed to be visited"))
return
}
diff --git a/routers/web/user/setting/profile.go b/routers/web/user/setting/profile.go
index 173550ad19..400ee71f08 100644
--- a/routers/web/user/setting/profile.go
+++ b/routers/web/user/setting/profile.go
@@ -51,6 +51,9 @@ func Profile(ctx *context.Context) {
ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
ctx.Data["CooldownPeriod"] = setting.Service.UsernameCooldownPeriod
ctx.Data["CommonPronouns"] = commonPronouns
+ ctx.Data["MaxAvatarFileSize"] = setting.Avatar.MaxFileSize
+ ctx.Data["MaxAvatarWidth"] = setting.Avatar.MaxWidth
+ ctx.Data["MaxAvatarHeight"] = setting.Avatar.MaxHeight
ctx.HTML(http.StatusOK, tplSettingsProfile)
}
@@ -63,6 +66,9 @@ func ProfilePost(ctx *context.Context) {
ctx.Data["DisableGravatar"] = setting.Config().Picture.DisableGravatar.Value(ctx)
ctx.Data["CooldownPeriod"] = setting.Service.UsernameCooldownPeriod
ctx.Data["CommonPronouns"] = commonPronouns
+ ctx.Data["MaxAvatarFileSize"] = setting.Avatar.MaxFileSize
+ ctx.Data["MaxAvatarWidth"] = setting.Avatar.MaxWidth
+ ctx.Data["MaxAvatarHeight"] = setting.Avatar.MaxHeight
if ctx.HasError() {
ctx.HTML(http.StatusOK, tplSettingsProfile)
@@ -146,7 +152,7 @@ func UpdateAvatarSetting(ctx *context.Context, form *forms.AvatarForm, ctxUser *
}
st := typesniffer.DetectContentType(data)
- if !(st.IsImage() && !st.IsSvgImage()) {
+ if !st.IsImage() || st.IsSvgImage() {
return errors.New(ctx.Locale.TrString("settings.uploaded_avatar_not_a_image"))
}
if err = user_service.UploadAvatar(ctx, ctxUser, data); err != nil {
diff --git a/routers/web/user/setting/security/2fa.go b/routers/web/user/setting/security/2fa.go
index f1271c8370..8b362c4f08 100644
--- a/routers/web/user/setting/security/2fa.go
+++ b/routers/web/user/setting/security/2fa.go
@@ -40,11 +40,7 @@ func RegenerateScratchTwoFactor(ctx *context.Context) {
return
}
- token, err := t.GenerateScratchToken()
- if err != nil {
- ctx.ServerError("SettingsTwoFactor: Failed to GenerateScratchToken", err)
- return
- }
+ token := t.GenerateScratchToken()
if err = auth.UpdateTwoFactor(ctx, t); err != nil {
ctx.ServerError("SettingsTwoFactor: Failed to UpdateTwoFactor", err)
@@ -220,11 +216,7 @@ func EnrollTwoFactorPost(ctx *context.Context) {
t = &auth.TwoFactor{
UID: ctx.Doer.ID,
}
- token, err := t.GenerateScratchToken()
- if err != nil {
- ctx.ServerError("SettingsTwoFactor: Failed to generate scratch token", err)
- return
- }
+ token := t.GenerateScratchToken()
// Now we have to delete the secrets - because if we fail to insert then it's highly likely that they have already been used
// If we can detect the unique constraint failure below we can move this to after the NewTwoFactor
diff --git a/routers/web/web.go b/routers/web/web.go
index 303167a6b9..4b39f22f7d 100644
--- a/routers/web/web.go
+++ b/routers/web/web.go
@@ -1,4 +1,5 @@
// Copyright 2017 The Gitea Authors. All rights reserved.
+// Copyright 2023 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package web
@@ -31,6 +32,7 @@ import (
"forgejo.org/routers/web/feed"
"forgejo.org/routers/web/healthcheck"
"forgejo.org/routers/web/misc"
+ "forgejo.org/routers/web/moderation"
"forgejo.org/routers/web/org"
org_setting "forgejo.org/routers/web/org/setting"
"forgejo.org/routers/web/repo"
@@ -473,6 +475,11 @@ func registerRoutes(m *web.Route) {
m.Get("/search", repo.SearchIssues)
}, reqSignIn)
+ if setting.Moderation.Enabled {
+ m.Get("/report_abuse", reqSignIn, moderation.NewReport)
+ m.Post("/report_abuse", reqSignIn, web.Bind(forms.ReportAbuseForm{}), moderation.CreatePost)
+ }
+
m.Get("/pulls", reqSignIn, user.Pulls)
m.Get("/milestones", reqSignIn, reqMilestonesDashboardPageEnabled, user.Milestones)
@@ -811,13 +818,13 @@ func registerRoutes(m *web.Route) {
individualPermsChecker := func(ctx *context.Context) {
// org permissions have been checked in context.OrgAssignment(), but individual permissions haven't been checked.
if ctx.ContextUser.IsIndividual() {
- switch {
- case ctx.ContextUser.Visibility == structs.VisibleTypePrivate:
+ switch ctx.ContextUser.Visibility {
+ case structs.VisibleTypePrivate:
if ctx.Doer == nil || (ctx.ContextUser.ID != ctx.Doer.ID && !ctx.Doer.IsAdmin) {
ctx.NotFound("Visit Project", nil)
return
}
- case ctx.ContextUser.Visibility == structs.VisibleTypeLimited:
+ case structs.VisibleTypeLimited:
if ctx.Doer == nil {
ctx.NotFound("Visit Project", nil)
return
@@ -1392,7 +1399,7 @@ func registerRoutes(m *web.Route) {
m.Get("", actions.List)
m.Post("/disable", reqRepoAdmin, actions.DisableWorkflowFile)
m.Post("/enable", reqRepoAdmin, actions.EnableWorkflowFile)
- m.Post("/manual", reqRepoAdmin, actions.ManualRunWorkflow)
+ m.Post("/manual", reqRepoActionsWriter, actions.ManualRunWorkflow)
m.Group("/runs", func() {
m.Get("/latest", actions.ViewLatest)
@@ -1454,7 +1461,7 @@ func registerRoutes(m *web.Route) {
}, repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
m.Group("/recent-commits", func() {
m.Get("", repo.RecentCommits)
- m.Get("/data", repo.RecentCommitsData)
+ m.Get("/data", repo.CodeFrequencyData)
}, repo.MustBeNotEmpty, context.RequireRepoReaderOr(unit.TypeCode))
}, context.RepoRef(), context.RequireRepoReaderOr(unit.TypeCode, unit.TypePullRequests, unit.TypeIssues, unit.TypeReleases))
@@ -1503,7 +1510,10 @@ func registerRoutes(m *web.Route) {
m.Group("/commits", func() {
m.Get("", context.RepoRef(), repo.SetWhitespaceBehavior, repo.GetPullDiffStats, repo.ViewPullCommits)
m.Get("/list", context.RepoRef(), repo.GetPullCommits)
- m.Get("/{sha:[a-f0-9]{4,40}}", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
+ m.Group("/{sha:[a-f0-9]{4,40}}", func() {
+ m.Get("", context.RepoRef(), repo.SetEditorconfigIfExists, repo.SetDiffViewStyle, repo.SetWhitespaceBehavior, repo.SetShowOutdatedComments, repo.ViewPullFilesForSingleCommit)
+ m.Post("/reviews/submit", context.RepoMustNotBeArchived(), web.Bind(forms.SubmitReviewForm{}), repo.SubmitReview)
+ })
})
m.Post("/merge", context.RepoMustNotBeArchived(), web.Bind(forms.MergePullRequestForm{}), context.EnforceQuotaWeb(quota_model.LimitSubjectSizeGitAll, context.QuotaTargetRepo), repo.MergePullRequest)
m.Post("/cancel_auto_merge", context.RepoMustNotBeArchived(), repo.CancelAutoMergePullRequest)
@@ -1591,6 +1601,8 @@ func registerRoutes(m *web.Route) {
}, context.RepoRef(), reqRepoCodeReader)
}
m.Get("/commit/{sha:([a-f0-9]{4,64})}.{ext:patch|diff}", repo.MustBeNotEmpty, reqRepoCodeReader, repo.RawDiff)
+
+ m.Post("/sync_fork", context.RepoMustNotBeArchived(), repo.MustBeNotEmpty, reqRepoCodeWriter, repo.SyncFork)
}, ignSignIn, context.RepoAssignment, context.UnitTypes())
m.Post("/{username}/{reponame}/lastcommit/*", ignSignInAndCsrf, context.RepoAssignment, context.UnitTypes(), context.RepoRefByType(context.RepoRefCommit), reqRepoCodeReader, repo.LastCommit)
@@ -1661,6 +1673,7 @@ func registerRoutes(m *web.Route) {
m.Any("/devtest", devtest.List)
m.Any("/devtest/fetch-action-test", devtest.FetchActionTest)
m.Any("/devtest/{sub}", devtest.Tmpl)
+ m.Get("/devtest/error/{errcode}", devtest.ErrorPage)
}
m.NotFound(func(w http.ResponseWriter, req *http.Request) {
diff --git a/services/actions/auth.go b/services/actions/auth.go
index 4dc86a35f3..98b618aeba 100644
--- a/services/actions/auth.go
+++ b/services/actions/auth.go
@@ -4,6 +4,7 @@
package actions
import (
+ "errors"
"fmt"
"net/http"
"strings"
@@ -80,7 +81,7 @@ func ParseAuthorizationToken(req *http.Request) (int64, error) {
parts := strings.SplitN(h, " ", 2)
if len(parts) != 2 {
log.Error("split token failed: %s", h)
- return 0, fmt.Errorf("split token failed")
+ return 0, errors.New("split token failed")
}
return TokenToTaskID(parts[1])
@@ -100,7 +101,7 @@ func TokenToTaskID(token string) (int64, error) {
c, ok := parsedToken.Claims.(*actionsClaims)
if !parsedToken.Valid || !ok {
- return 0, fmt.Errorf("invalid token claim")
+ return 0, errors.New("invalid token claim")
}
return c.TaskID, nil
diff --git a/services/actions/auth_test.go b/services/actions/auth_test.go
index 93a5980bc5..d9f0437e1b 100644
--- a/services/actions/auth_test.go
+++ b/services/actions/auth_test.go
@@ -19,7 +19,7 @@ func TestCreateAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
require.NoError(t, err)
- assert.NotEqual(t, "", token)
+ assert.NotEmpty(t, token)
claims := jwt.MapClaims{}
_, err = jwt.ParseWithClaims(token, claims, func(t *jwt.Token) (any, error) {
return setting.GetGeneralTokenSigningSecret(), nil
@@ -45,7 +45,7 @@ func TestParseAuthorizationToken(t *testing.T) {
var taskID int64 = 23
token, err := CreateAuthorizationToken(taskID, 1, 2)
require.NoError(t, err)
- assert.NotEqual(t, "", token)
+ assert.NotEmpty(t, token)
headers := http.Header{}
headers.Set("Authorization", "Bearer "+token)
rTaskID, err := ParseAuthorizationToken(&http.Request{
diff --git a/services/actions/cleanup.go b/services/actions/cleanup.go
index fde5286e60..918be0f185 100644
--- a/services/actions/cleanup.go
+++ b/services/actions/cleanup.go
@@ -126,3 +126,9 @@ func CleanupLogs(ctx context.Context) error {
log.Info("Removed %d logs", count)
return nil
}
+
+// CleanupOfflineRunners removes offline runners
+func CleanupOfflineRunners(ctx context.Context, duration time.Duration, globalOnly bool) error {
+ olderThan := timeutil.TimeStampNow().AddDuration(-duration)
+ return actions_model.DeleteOfflineRunners(ctx, olderThan, globalOnly)
+}
diff --git a/services/actions/cleanup_test.go b/services/actions/cleanup_test.go
index 67f68d4de9..4a847ced23 100644
--- a/services/actions/cleanup_test.go
+++ b/services/actions/cleanup_test.go
@@ -24,7 +24,7 @@ func TestCleanup(t *testing.T) {
require.NoError(t, CleanupLogs(db.DefaultContext))
task := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionTask{ID: 1001})
- assert.EqualValues(t, "does-not-exist", task.LogFilename)
+ assert.Equal(t, "does-not-exist", task.LogFilename)
assert.True(t, task.LogExpired)
assert.Nil(t, task.LogIndexes)
})
diff --git a/services/actions/clear_tasks.go b/services/actions/clear_tasks.go
index 31e15ec927..c36dda55b2 100644
--- a/services/actions/clear_tasks.go
+++ b/services/actions/clear_tasks.go
@@ -41,7 +41,7 @@ func stopTasks(ctx context.Context, opts actions_model.FindTaskOptions) error {
jobs := make([]*actions_model.ActionRunJob, 0, len(tasks))
for _, task := range tasks {
if err := db.WithTx(ctx, func(ctx context.Context) error {
- if err := actions_model.StopTask(ctx, task.ID, actions_model.StatusFailure); err != nil {
+ if err := StopTask(ctx, task.ID, actions_model.StatusFailure); err != nil {
return err
}
if err := task.LoadJob(ctx); err != nil {
@@ -88,7 +88,7 @@ func CancelAbandonedJobs(ctx context.Context) error {
job.Status = actions_model.StatusCancelled
job.Stopped = now
if err := db.WithTx(ctx, func(ctx context.Context) error {
- _, err := actions_model.UpdateRunJob(ctx, job, nil, "status", "stopped")
+ _, err := UpdateRunJob(ctx, job, nil, "status", "stopped")
return err
}); err != nil {
log.Warn("cancel abandoned job %v: %v", job.ID, err)
diff --git a/services/actions/commit_status.go b/services/actions/commit_status.go
index 1fffa6852f..755fa648dc 100644
--- a/services/actions/commit_status.go
+++ b/services/actions/commit_status.go
@@ -5,6 +5,7 @@ package actions
import (
"context"
+ "errors"
"fmt"
"path"
@@ -50,7 +51,7 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
return fmt.Errorf("GetPushEventPayload: %w", err)
}
if payload.HeadCommit == nil {
- return fmt.Errorf("head commit is missing in event payload")
+ return errors.New("head commit is missing in event payload")
}
sha = payload.HeadCommit.ID
case webhook_module.HookEventPullRequest, webhook_module.HookEventPullRequestSync, webhook_module.HookEventPullRequestLabel, webhook_module.HookEventPullRequestAssign, webhook_module.HookEventPullRequestMilestone:
@@ -64,9 +65,9 @@ func createCommitStatus(ctx context.Context, job *actions_model.ActionRunJob) er
return fmt.Errorf("GetPullRequestEventPayload: %w", err)
}
if payload.PullRequest == nil {
- return fmt.Errorf("pull request is missing in event payload")
+ return errors.New("pull request is missing in event payload")
} else if payload.PullRequest.Head == nil {
- return fmt.Errorf("head of pull request is missing in event payload")
+ return errors.New("head of pull request is missing in event payload")
}
sha = payload.PullRequest.Head.Sha
case webhook_module.HookEventRelease:
diff --git a/services/actions/job_emitter.go b/services/actions/job_emitter.go
index d4ca029d46..942c698e73 100644
--- a/services/actions/job_emitter.go
+++ b/services/actions/job_emitter.go
@@ -59,7 +59,7 @@ func checkJobsOfRun(ctx context.Context, runID int64) error {
for _, job := range jobs {
if status, ok := updates[job.ID]; ok {
job.Status = status
- if n, err := actions_model.UpdateRunJob(ctx, job, builder.Eq{"status": actions_model.StatusBlocked}, "status"); err != nil {
+ if n, err := UpdateRunJob(ctx, job, builder.Eq{"status": actions_model.StatusBlocked}, "status"); err != nil {
return err
} else if n != 1 {
return fmt.Errorf("no affected for updating blocked job %v", job.ID)
diff --git a/services/actions/notifier.go b/services/actions/notifier.go
index 2d3a1d2107..f1e9a6d7e9 100644
--- a/services/actions/notifier.go
+++ b/services/actions/notifier.go
@@ -5,7 +5,9 @@ package actions
import (
"context"
+ "errors"
+ actions_model "forgejo.org/models/actions"
issues_model "forgejo.org/models/issues"
packages_model "forgejo.org/models/packages"
perm_model "forgejo.org/models/perm"
@@ -17,9 +19,12 @@ import (
"forgejo.org/modules/repository"
"forgejo.org/modules/setting"
api "forgejo.org/modules/structs"
+ "forgejo.org/modules/util"
webhook_module "forgejo.org/modules/webhook"
"forgejo.org/services/convert"
notify_service "forgejo.org/services/notify"
+
+ "xorm.io/builder"
)
type actionsNotifier struct {
@@ -775,3 +780,76 @@ func (n *actionsNotifier) MigrateRepository(ctx context.Context, doer, u *user_m
Sender: convert.ToUser(ctx, doer, nil),
}).Notify(ctx)
}
+
+// sendActionRunNowDoneNotificationIfNeeded should be called whenever an ActionRun has been updated.
+// priorRun and updatedRun represent the very same ActionRun, just at different times:
+// priorRun before the update and updatedRun after it.
+// The lastRun parameter of the ActionRunNowDone notification represents an entirely different ActionRun:
+// the ActionRun of the same workflow that finished before priorRun/updatedRun.
+func sendActionRunNowDoneNotificationIfNeeded(ctx context.Context, priorRun, updatedRun *actions_model.ActionRun) error {
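+ // Only notify when the run transitions from a not-done to a done status.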
+ if !priorRun.Status.IsDone() && updatedRun.Status.IsDone() {
+ lastRun, err := actions_model.GetRunBefore(ctx, updatedRun.RepoID, updatedRun.Stopped)
+ if err != nil && !errors.Is(err, util.ErrNotExist) {
+ return err
+ }
+ // lastRun is nil when no previous run was found
+ if lastRun != nil {
+ if err = lastRun.LoadAttributes(ctx); err != nil {
+ return err
+ }
+ }
+ if err = updatedRun.LoadAttributes(ctx); err != nil {
+ return err
+ }
+ notify_service.ActionRunNowDone(ctx, updatedRun, priorRun.Status, lastRun)
+ }
+ return nil
+}
+
+// UpdateRun is a wrapper around UpdateRunWithoutNotification that also sends the ActionRunNowDone notification when needed.
+func UpdateRun(ctx context.Context, run *actions_model.ActionRun, cols ...string) error {
+ // run.ID is the only thing that must be given
+ priorRun, err := actions_model.GetRunByID(ctx, run.ID)
+ if err != nil {
+ return err
+ }
+
+ if err = actions_model.UpdateRunWithoutNotification(ctx, run, cols...); err != nil {
+ return err
+ }
+
+ updatedRun, err := actions_model.GetRunByID(ctx, run.ID)
+ if err != nil {
+ return err
+ }
+ return sendActionRunNowDoneNotificationIfNeeded(ctx, priorRun, updatedRun)
+}
+
+// UpdateRunJob is a wrapper around UpdateRunJobWithoutNotification that also sends the ActionRunNowDone notification when needed.
+func UpdateRunJob(ctx context.Context, job *actions_model.ActionRunJob, cond builder.Cond, cols ...string) (int64, error) {
+ runID := job.RunID
+ if runID == 0 {
+ // job.ID is the only field that must be set
+ // Don't overwrite job here; we'd lose the change we need to make.
+ oldJob, err := actions_model.GetRunJobByID(ctx, job.ID)
+ if err != nil {
+ return 0, err
+ }
+ runID = oldJob.RunID
+ }
+ priorRun, err := actions_model.GetRunByID(ctx, runID)
+ if err != nil {
+ return 0, err
+ }
+
+ affected, err := actions_model.UpdateRunJobWithoutNotification(ctx, job, cond, cols...)
+ if err != nil {
+ return affected, err
+ }
+
+ updatedRun, err := actions_model.GetRunByID(ctx, runID)
+ if err != nil {
+ return affected, err
+ }
+ return affected, sendActionRunNowDoneNotificationIfNeeded(ctx, priorRun, updatedRun)
+}
diff --git a/services/actions/notifier_helper.go b/services/actions/notifier_helper.go
index 9de0b75ac7..e240c996b5 100644
--- a/services/actions/notifier_helper.go
+++ b/services/actions/notifier_helper.go
@@ -139,7 +139,7 @@ func notify(ctx context.Context, input *notifyInput) error {
return nil
}
if unit_model.TypeActions.UnitGlobalDisabled() {
- if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, true); err != nil {
+ if err := CleanRepoScheduleTasks(ctx, input.Repo, true); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
return nil
@@ -345,6 +345,14 @@ func handleWorkflows(
Status: actions_model.StatusWaiting,
}
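+ // Parse the workflow to determine whether email notifications should be sent for this run.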
+ if workflow, err := model.ReadWorkflow(bytes.NewReader(dwf.Content)); err == nil {
+ notifications, err := workflow.Notifications()
+ if err != nil {
+ log.Error("Notifications: %v", err)
+ }
+ run.NotifyEmail = notifications
+ }
+
need, err := ifNeedApproval(ctx, run, input.Repo, input.Doer)
if err != nil {
log.Error("check if need approval for repo %d with user %d: %v", input.Repo.ID, input.Doer.ID, err)
@@ -366,14 +374,17 @@ func handleWorkflows(
jobs, err := jobparser.Parse(dwf.Content, jobparser.WithVars(vars))
if err != nil {
- log.Error("jobparser.Parse: %v", err)
- continue
+ run.Status = actions_model.StatusFailure
+ log.Info("jobparser.Parse: invalid workflow, setting job status to failed: %v", err)
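+ // Record a single placeholder job named after the workflow entry so the failed run is still persisted.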
+ jobs = []*jobparser.SingleWorkflow{{
+ Name: dwf.EntryName,
+ }}
}
// cancel running jobs if the event is push or pull_request_sync
if run.Event == webhook_module.HookEventPush ||
run.Event == webhook_module.HookEventPullRequestSync {
- if err := actions_model.CancelPreviousJobs(
+ if err := CancelPreviousJobs(
ctx,
run.RepoID,
run.Ref,
@@ -504,7 +515,7 @@ func handleSchedules(
log.Error("CountSchedules: %v", err)
return err
} else if count > 0 {
- if err := actions_model.CleanRepoScheduleTasks(ctx, input.Repo, false); err != nil {
+ if err := CleanRepoScheduleTasks(ctx, input.Repo, false); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
}
diff --git a/services/actions/schedule_tasks.go b/services/actions/schedule_tasks.go
index f66a6ca092..cf8b29ead7 100644
--- a/services/actions/schedule_tasks.go
+++ b/services/actions/schedule_tasks.go
@@ -4,7 +4,9 @@
package actions
import (
+ "bytes"
"context"
+ "errors"
"fmt"
"time"
@@ -17,6 +19,8 @@ import (
webhook_module "forgejo.org/modules/webhook"
"github.com/nektos/act/pkg/jobparser"
+ act_model "github.com/nektos/act/pkg/model"
+ "xorm.io/builder"
)
// StartScheduleTasks start the task
@@ -55,7 +59,7 @@ func startTasks(ctx context.Context) error {
// cancel running jobs if the event is push
if row.Schedule.Event == webhook_module.HookEventPush {
// cancel running jobs of the same workflow
- if err := actions_model.CancelPreviousJobs(
+ if err := CancelPreviousJobs(
ctx,
row.RepoID,
row.Schedule.Ref,
@@ -138,6 +142,16 @@ func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule)
return err
}
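+ // Parse the workflow to determine whether email notifications are enabled for runs of this schedule.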
+ workflow, err := act_model.ReadWorkflow(bytes.NewReader(cron.Content))
+ if err != nil {
+ return err
+ }
+ notifications, err := workflow.Notifications()
+ if err != nil {
+ return err
+ }
+ run.NotifyEmail = notifications
+
// Parse the workflow specification from the cron schedule
workflows, err := jobparser.Parse(cron.Content, jobparser.WithVars(vars))
if err != nil {
@@ -152,3 +166,93 @@ func CreateScheduleTask(ctx context.Context, cron *actions_model.ActionSchedule)
// Return nil if no errors occurred
return nil
}
+
+// CancelPreviousJobs cancels all previous jobs of the same repository, reference, workflow, and event.
+// It's useful when a new run is triggered and the previous runs no longer need to continue.
+func CancelPreviousJobs(ctx context.Context, repoID int64, ref, workflowID string, event webhook_module.HookEventType) error {
+ // Find all runs in the specified repository, reference, and workflow with non-final status
+ runs, total, err := db.FindAndCount[actions_model.ActionRun](ctx, actions_model.FindRunOptions{
+ RepoID: repoID,
+ Ref: ref,
+ WorkflowID: workflowID,
+ TriggerEvent: event,
+ Status: []actions_model.Status{actions_model.StatusRunning, actions_model.StatusWaiting, actions_model.StatusBlocked},
+ })
+ if err != nil {
+ return err
+ }
+
+ // If there are no runs found, there's no need to proceed with cancellation, so return nil.
+ if total == 0 {
+ return nil
+ }
+
+ // Iterate over each found run and cancel its associated jobs.
+ for _, run := range runs {
+ // Find all jobs associated with the current run.
+ jobs, err := db.Find[actions_model.ActionRunJob](ctx, actions_model.FindRunJobOptions{
+ RunID: run.ID,
+ })
+ if err != nil {
+ return err
+ }
+
+ // Iterate over each job and attempt to cancel it.
+ for _, job := range jobs {
+ // Skip jobs that are already in a terminal state (completed, cancelled, etc.).
+ status := job.Status
+ if status.IsDone() {
+ continue
+ }
+
+ // If the job has no associated task (probably an error), set its status to 'Cancelled' and stop it.
+ if job.TaskID == 0 {
+ job.Status = actions_model.StatusCancelled
+ job.Stopped = timeutil.TimeStampNow()
+
+ // Update the job's status and stopped time in the database.
+ n, err := UpdateRunJob(ctx, job, builder.Eq{"task_id": 0}, "status", "stopped")
+ if err != nil {
+ return err
+ }
+
+ // If the update affected 0 rows, it means the job has changed in the meantime, so we need to try again.
+ if n == 0 {
+ return errors.New("job has changed, try again")
+ }
+
+ // Continue with the next job.
+ continue
+ }
+
+ // If the job has an associated task, try to stop the task, effectively cancelling the job.
+ if err := StopTask(ctx, job.TaskID, actions_model.StatusCancelled); err != nil {
+ return err
+ }
+ }
+ }
+
+ // Return nil to indicate successful cancellation of all running and waiting jobs.
+ return nil
+}
+
+func CleanRepoScheduleTasks(ctx context.Context, repo *repo_model.Repository, cancelPreviousJobs bool) error {
+ // If Actions was disabled while schedule tasks still exist, remove the outdated schedule tasks.
+ // There is no other place to do this because app.ini is changed manually.
+ if err := actions_model.DeleteScheduleTaskByRepo(ctx, repo.ID); err != nil {
+ return fmt.Errorf("DeleteScheduleTaskByRepo: %v", err)
+ }
+ if cancelPreviousJobs {
+ // cancel running cron jobs of this repository and delete old schedules
+ if err := CancelPreviousJobs(
+ ctx,
+ repo.ID,
+ repo.DefaultBranch,
+ "",
+ webhook_module.HookEventSchedule,
+ ); err != nil {
+ return fmt.Errorf("CancelPreviousJobs: %v", err)
+ }
+ }
+ return nil
+}
diff --git a/services/actions/schedule_tasks_test.go b/services/actions/schedule_tasks_test.go
new file mode 100644
index 0000000000..7073985252
--- /dev/null
+++ b/services/actions/schedule_tasks_test.go
@@ -0,0 +1,121 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package actions
+
+import (
+ "testing"
+
+ actions_model "forgejo.org/models/actions"
+ repo_model "forgejo.org/models/repo"
+ "forgejo.org/models/unittest"
+ webhook_module "forgejo.org/modules/webhook"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestCreateScheduleTask(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2, OwnerID: 2})
+
+ assertConstant := func(t *testing.T, cron *actions_model.ActionSchedule, run *actions_model.ActionRun) {
+ t.Helper()
+ assert.Equal(t, cron.Title, run.Title)
+ assert.Equal(t, cron.RepoID, run.RepoID)
+ assert.Equal(t, cron.OwnerID, run.OwnerID)
+ assert.Equal(t, cron.WorkflowID, run.WorkflowID)
+ assert.Equal(t, cron.TriggerUserID, run.TriggerUserID)
+ assert.Equal(t, cron.Ref, run.Ref)
+ assert.Equal(t, cron.CommitSHA, run.CommitSHA)
+ assert.Equal(t, cron.Event, run.Event)
+ assert.Equal(t, cron.EventPayload, run.EventPayload)
+ assert.Equal(t, cron.ID, run.ScheduleID)
+ assert.Equal(t, actions_model.StatusWaiting, run.Status)
+ }
+
+ assertMutable := func(t *testing.T, expected, run *actions_model.ActionRun) {
+ t.Helper()
+ assert.Equal(t, expected.NotifyEmail, run.NotifyEmail)
+ }
+
+ testCases := []struct {
+ name string
+ cron actions_model.ActionSchedule
+ want []actions_model.ActionRun
+ }{
+ {
+ name: "simple",
+ cron: actions_model.ActionSchedule{
+ Title: "scheduletitle1",
+ RepoID: repo.ID,
+ OwnerID: repo.OwnerID,
+ WorkflowID: "some.yml",
+ TriggerUserID: repo.OwnerID,
+ Ref: "branch",
+ CommitSHA: "fakeSHA",
+ Event: webhook_module.HookEventSchedule,
+ EventPayload: "fakepayload",
+ Content: []byte(
+ `
+name: test
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ steps:
+ - run: true
+`),
+ },
+ want: []actions_model.ActionRun{
+ {
+ Title: "scheduletitle1",
+ NotifyEmail: false,
+ },
+ },
+ },
+ {
+ name: "enable-email-notifications is true",
+ cron: actions_model.ActionSchedule{
+ Title: "scheduletitle2",
+ RepoID: repo.ID,
+ OwnerID: repo.OwnerID,
+ WorkflowID: "some.yml",
+ TriggerUserID: repo.OwnerID,
+ Ref: "branch",
+ CommitSHA: "fakeSHA",
+ Event: webhook_module.HookEventSchedule,
+ EventPayload: "fakepayload",
+ Content: []byte(
+ `
+name: test
+enable-email-notifications: true
+on: push
+jobs:
+ job2:
+ runs-on: ubuntu-latest
+ steps:
+ - run: true
+`),
+ },
+ want: []actions_model.ActionRun{
+ {
+ Title: "scheduletitle2",
+ NotifyEmail: true,
+ },
+ },
+ },
+ }
+ for _, testCase := range testCases {
+ t.Run(testCase.name, func(t *testing.T) {
+ require.NoError(t, CreateScheduleTask(t.Context(), &testCase.cron))
+ require.Equal(t, len(testCase.want), unittest.GetCount(t, actions_model.ActionRun{RepoID: repo.ID}))
+ for _, expected := range testCase.want {
+ run := unittest.AssertExistsAndLoadBean(t, &actions_model.ActionRun{Title: expected.Title})
+ assertConstant(t, &testCase.cron, run)
+ assertMutable(t, &expected, run)
+ }
+ unittest.AssertSuccessfulDelete(t, actions_model.ActionRun{RepoID: repo.ID})
+ })
+ }
+}
diff --git a/services/actions/task.go b/services/actions/task.go
index 43c8deaa5f..bb319c7d05 100644
--- a/services/actions/task.go
+++ b/services/actions/task.go
@@ -5,14 +5,18 @@ package actions
import (
"context"
+ "errors"
"fmt"
actions_model "forgejo.org/models/actions"
"forgejo.org/models/db"
secret_model "forgejo.org/models/secret"
+ "forgejo.org/modules/timeutil"
+ "forgejo.org/modules/util"
runnerv1 "code.gitea.io/actions-proto-go/runner/v1"
"google.golang.org/protobuf/types/known/structpb"
+ "google.golang.org/protobuf/types/known/timestamppb"
)
func PickTask(ctx context.Context, runner *actions_model.ActionRunner) (*runnerv1.Task, bool, error) {
@@ -105,3 +109,144 @@ func findTaskNeeds(ctx context.Context, taskJob *actions_model.ActionRunJob) (ma
}
return ret, nil
}
+
+func StopTask(ctx context.Context, taskID int64, status actions_model.Status) error {
+ if !status.IsDone() {
+ return fmt.Errorf("cannot stop task with status %v", status)
+ }
+ e := db.GetEngine(ctx)
+
+ task := &actions_model.ActionTask{}
+ if has, err := e.ID(taskID).Get(task); err != nil {
+ return err
+ } else if !has {
+ return util.ErrNotExist
+ }
+ if task.Status.IsDone() {
+ return nil
+ }
+
+ now := timeutil.TimeStampNow()
+ task.Status = status
+ task.Stopped = now
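+ // Propagate the final status to the job; UpdateRunJob also sends the ActionRunNowDone notification if the whole run is now done.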
+ if _, err := UpdateRunJob(ctx, &actions_model.ActionRunJob{
+ ID: task.JobID,
+ Status: task.Status,
+ Stopped: task.Stopped,
+ }, nil); err != nil {
+ return err
+ }
+
+ if err := actions_model.UpdateTask(ctx, task, "status", "stopped"); err != nil {
+ return err
+ }
+
+ if err := task.LoadAttributes(ctx); err != nil {
+ return err
+ }
+
+ for _, step := range task.Steps {
+ if !step.Status.IsDone() {
+ step.Status = status
+ if step.Started == 0 {
+ step.Started = now
+ }
+ step.Stopped = now
+ }
+ if _, err := e.ID(step.ID).Update(step); err != nil {
+ return err
+ }
+ }
+
+ return nil
+}
+
+// UpdateTaskByState updates the task according to the given state.
+// It always updates the task when the state is not final, even if nothing has changed,
+// so that ActionTask.Updated is refreshed and the task is not judged to be a zombie task.
+func UpdateTaskByState(ctx context.Context, runnerID int64, state *runnerv1.TaskState) (*actions_model.ActionTask, error) {
+ stepStates := map[int64]*runnerv1.StepState{}
+ for _, v := range state.Steps {
+ stepStates[v.Id] = v
+ }
+
+ ctx, commiter, err := db.TxContext(ctx)
+ if err != nil {
+ return nil, err
+ }
+ defer commiter.Close()
+
+ e := db.GetEngine(ctx)
+
+ task := &actions_model.ActionTask{}
+ if has, err := e.ID(state.Id).Get(task); err != nil {
+ return nil, err
+ } else if !has {
+ return nil, util.ErrNotExist
+ } else if runnerID != task.RunnerID {
+ return nil, errors.New("invalid runner for task")
+ }
+
+ if task.Status.IsDone() {
+ // the state is final, do nothing
+ return task, nil
+ }
+
+ // A state.Result other than unspecified means the task is finished
+ if state.Result != runnerv1.Result_RESULT_UNSPECIFIED {
+ task.Status = actions_model.Status(state.Result)
+ task.Stopped = timeutil.TimeStamp(state.StoppedAt.AsTime().Unix())
+ if err := actions_model.UpdateTask(ctx, task, "status", "stopped"); err != nil {
+ return nil, err
+ }
+ if _, err := UpdateRunJob(ctx, &actions_model.ActionRunJob{
+ ID: task.JobID,
+ Status: task.Status,
+ Stopped: task.Stopped,
+ }, nil); err != nil {
+ return nil, err
+ }
+ } else {
+ // Force update ActionTask.Updated to avoid the task being judged as a zombie task
+ task.Updated = timeutil.TimeStampNow()
+ if err := actions_model.UpdateTask(ctx, task, "updated"); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := task.LoadAttributes(ctx); err != nil {
+ return nil, err
+ }
+
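+ // Sync each step's status and log offsets with the state reported by the runner.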
+ for _, step := range task.Steps {
+ var result runnerv1.Result
+ if v, ok := stepStates[step.Index]; ok {
+ result = v.Result
+ step.LogIndex = v.LogIndex
+ step.LogLength = v.LogLength
+ step.Started = convertTimestamp(v.StartedAt)
+ step.Stopped = convertTimestamp(v.StoppedAt)
+ }
+ if result != runnerv1.Result_RESULT_UNSPECIFIED {
+ step.Status = actions_model.Status(result)
+ } else if step.Started != 0 {
+ step.Status = actions_model.StatusRunning
+ }
+ if _, err := e.ID(step.ID).Update(step); err != nil {
+ return nil, err
+ }
+ }
+
+ if err := commiter.Commit(); err != nil {
+ return nil, err
+ }
+
+ return task, nil
+}
+
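+// convertTimestamp converts a protobuf timestamp into a TimeStamp, treating the zero timestamp as unset.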
+func convertTimestamp(timestamp *timestamppb.Timestamp) timeutil.TimeStamp {
+ if timestamp.GetSeconds() == 0 && timestamp.GetNanos() == 0 {
+ return timeutil.TimeStamp(0)
+ }
+ return timeutil.TimeStamp(timestamp.AsTime().Unix())
+}
diff --git a/services/actions/workflows.go b/services/actions/workflows.go
index 7ec7c3abed..fbba3fd667 100644
--- a/services/actions/workflows.go
+++ b/services/actions/workflows.go
@@ -111,6 +111,11 @@ func (entry *Workflow) Dispatch(ctx context.Context, inputGetter InputValueGette
return nil, nil, err
}
+ notifications, err := wf.Notifications()
+ if err != nil {
+ return nil, nil, err
+ }
+
run := &actions_model.ActionRun{
Title: title,
RepoID: repo.ID,
@@ -125,6 +130,7 @@ func (entry *Workflow) Dispatch(ctx context.Context, inputGetter InputValueGette
EventPayload: string(p),
TriggerEvent: string(webhook.HookEventWorkflowDispatch),
Status: actions_model.StatusWaiting,
+ NotifyEmail: notifications,
}
vars, err := actions_model.GetVariablesOfRun(ctx, run)
diff --git a/services/asymkey/sign.go b/services/asymkey/sign.go
index 0030523b22..527f6edd92 100644
--- a/services/asymkey/sign.go
+++ b/services/asymkey/sign.go
@@ -10,7 +10,6 @@ import (
asymkey_model "forgejo.org/models/asymkey"
"forgejo.org/models/auth"
- "forgejo.org/models/db"
git_model "forgejo.org/models/git"
issues_model "forgejo.org/models/issues"
repo_model "forgejo.org/models/repo"
@@ -90,6 +89,13 @@ func SigningKey(ctx context.Context, repoPath string) (string, *git.Signature) {
return "", nil
}
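+ // For SSH signing, return the configured signing key and identity directly; the git config lookup below does not apply.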
+ if setting.Repository.Signing.Format == "ssh" {
+ return setting.Repository.Signing.SigningKey, &git.Signature{
+ Name: setting.Repository.Signing.SigningName,
+ Email: setting.Repository.Signing.SigningEmail,
+ }
+ }
+
if setting.Repository.Signing.SigningKey == "default" || setting.Repository.Signing.SigningKey == "" {
// Can ignore the error here as it means that commit.gpgsign is not set
value, _, _ := git.NewCommand(ctx, "config", "--get", "commit.gpgsign").RunStdString(&git.RunOpts{Dir: repoPath})
@@ -145,22 +151,19 @@ Loop:
case always:
break Loop
case pubkey:
- keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
- OwnerID: u.ID,
- IncludeSubKeys: true,
- })
+ hasPubKey, err := asymkey_model.HasAsymKeyByUID(ctx, u.ID)
if err != nil {
return false, "", nil, err
}
- if len(keys) == 0 {
+ if !hasPubKey {
return false, "", nil, &ErrWontSign{pubkey}
}
case twofa:
- twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
- if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
return false, "", nil, err
}
- if twofaModel == nil {
+ if !hasTwoFactor {
return false, "", nil, &ErrWontSign{twofa}
}
}
@@ -185,22 +188,19 @@ Loop:
case always:
break Loop
case pubkey:
- keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
- OwnerID: u.ID,
- IncludeSubKeys: true,
- })
+ hasPubKey, err := asymkey_model.HasAsymKeyByUID(ctx, u.ID)
if err != nil {
return false, "", nil, err
}
- if len(keys) == 0 {
+ if !hasPubKey {
return false, "", nil, &ErrWontSign{pubkey}
}
case twofa:
- twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
- if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
return false, "", nil, err
}
- if twofaModel == nil {
+ if !hasTwoFactor {
return false, "", nil, &ErrWontSign{twofa}
}
case parentSigned:
@@ -241,22 +241,19 @@ Loop:
case always:
break Loop
case pubkey:
- keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
- OwnerID: u.ID,
- IncludeSubKeys: true,
- })
+ hasPubKey, err := asymkey_model.HasAsymKeyByUID(ctx, u.ID)
if err != nil {
return false, "", nil, err
}
- if len(keys) == 0 {
+ if !hasPubKey {
return false, "", nil, &ErrWontSign{pubkey}
}
case twofa:
- twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
- if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
return false, "", nil, err
}
- if twofaModel == nil {
+ if !hasTwoFactor {
return false, "", nil, &ErrWontSign{twofa}
}
case parentSigned:
@@ -306,22 +303,19 @@ Loop:
case always:
break Loop
case pubkey:
- keys, err := db.Find[asymkey_model.GPGKey](ctx, asymkey_model.FindGPGKeyOptions{
- OwnerID: u.ID,
- IncludeSubKeys: true,
- })
+ hasPubKey, err := asymkey_model.HasAsymKeyByUID(ctx, u.ID)
if err != nil {
return false, "", nil, err
}
- if len(keys) == 0 {
+ if !hasPubKey {
return false, "", nil, &ErrWontSign{pubkey}
}
case twofa:
- twofaModel, err := auth.GetTwoFactorByUID(ctx, u.ID)
- if err != nil && !auth.IsErrTwoFactorNotEnrolled(err) {
+ hasTwoFactor, err := auth.HasTwoFactorByUID(ctx, u.ID)
+ if err != nil {
return false, "", nil, err
}
- if twofaModel == nil {
+ if !hasTwoFactor {
return false, "", nil, &ErrWontSign{twofa}
}
case approved:
diff --git a/services/attachment/attachment.go b/services/attachment/attachment.go
index 365bd7faf6..b6f763842b 100644
--- a/services/attachment/attachment.go
+++ b/services/attachment/attachment.go
@@ -51,7 +51,7 @@ func NewExternalAttachment(ctx context.Context, attach *repo_model.Attachment) (
if attach.ExternalURL == "" {
return nil, fmt.Errorf("attachment %s should have a external url", attach.Name)
}
- if !validation.IsValidExternalURL(attach.ExternalURL) {
+ if !validation.IsValidReleaseAssetURL(attach.ExternalURL) {
return nil, repo_model.ErrInvalidExternalURL{ExternalURL: attach.ExternalURL}
}
diff --git a/services/attachment/attachment_test.go b/services/attachment/attachment_test.go
index 70b1e80d6a..ef002bf16c 100644
--- a/services/attachment/attachment_test.go
+++ b/services/attachment/attachment_test.go
@@ -43,6 +43,6 @@ func TestUploadAttachment(t *testing.T) {
attachment, err := repo_model.GetAttachmentByUUID(db.DefaultContext, attach.UUID)
require.NoError(t, err)
- assert.EqualValues(t, user.ID, attachment.UploaderID)
+ assert.Equal(t, user.ID, attachment.UploaderID)
assert.Equal(t, int64(0), attachment.DownloadCount)
}
diff --git a/services/auth/httpsign.go b/services/auth/httpsign.go
index d3cbb8aa60..e776ccbbed 100644
--- a/services/auth/httpsign.go
+++ b/services/auth/httpsign.go
@@ -134,7 +134,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) {
// Check if it's really a ssh certificate
cert, ok := pk.(*ssh.Certificate)
if !ok {
- return nil, fmt.Errorf("no certificate found")
+ return nil, errors.New("no certificate found")
}
c := &ssh.CertChecker{
@@ -153,7 +153,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) {
// check the CA of the cert
if !c.IsUserAuthority(cert.SignatureKey) {
- return nil, fmt.Errorf("CA check failed")
+ return nil, errors.New("CA check failed")
}
// Create a verifier
@@ -191,7 +191,7 @@ func VerifyCert(r *http.Request) (*asymkey_model.PublicKey, error) {
}
// No public key matching a principal in the certificate is registered in gitea
- return nil, fmt.Errorf("no valid principal found")
+ return nil, errors.New("no valid principal found")
}
// doVerify iterates across the provided public keys attempting the verify the current request against each key in turn
diff --git a/services/auth/oauth2.go b/services/auth/oauth2.go
index e6d556d10b..093940aa18 100644
--- a/services/auth/oauth2.go
+++ b/services/auth/oauth2.go
@@ -121,18 +121,6 @@ func (o *OAuth2) Name() string {
// representing whether the token exists or not
func parseToken(req *http.Request) (string, bool) {
_ = req.ParseForm()
- if !setting.DisableQueryAuthToken {
- // Check token.
- if token := req.Form.Get("token"); token != "" {
- return token, true
- }
- // Check access token.
- if token := req.Form.Get("access_token"); token != "" {
- return token, true
- }
- } else if req.Form.Get("token") != "" || req.Form.Get("access_token") != "" {
- log.Warn("API token sent in query string but DISABLE_QUERY_AUTH_TOKEN=true")
- }
// check header token
if auHead := req.Header.Get("Authorization"); auHead != "" {
diff --git a/services/auth/reverseproxy_test.go b/services/auth/reverseproxy_test.go
index 70ce1f8b0b..cdcd845148 100644
--- a/services/auth/reverseproxy_test.go
+++ b/services/auth/reverseproxy_test.go
@@ -38,10 +38,10 @@ func TestReverseProxyAuth(t *testing.T) {
require.EqualValues(t, 1, user_model.CountUsers(db.DefaultContext, nil))
unittest.AssertExistsAndLoadBean(t, &user_model.User{Email: "edgar@example.org", Name: "Edgar", LowerName: "edgar", FullName: "Edgar Allan Poe", IsAdmin: true})
- require.EqualValues(t, "edgar@example.org", user.Email)
- require.EqualValues(t, "Edgar", user.Name)
- require.EqualValues(t, "edgar", user.LowerName)
- require.EqualValues(t, "Edgar Allan Poe", user.FullName)
+ require.Equal(t, "edgar@example.org", user.Email)
+ require.Equal(t, "Edgar", user.Name)
+ require.Equal(t, "edgar", user.LowerName)
+ require.Equal(t, "Edgar Allan Poe", user.FullName)
require.True(t, user.IsAdmin)
})
@@ -58,10 +58,10 @@ func TestReverseProxyAuth(t *testing.T) {
require.EqualValues(t, 2, user_model.CountUsers(db.DefaultContext, nil))
unittest.AssertExistsAndLoadBean(t, &user_model.User{Email: "gusted@example.org", Name: "Gusted", LowerName: "gusted", FullName: "❤‿❤"}, "is_admin = false")
- require.EqualValues(t, "gusted@example.org", user.Email)
- require.EqualValues(t, "Gusted", user.Name)
- require.EqualValues(t, "gusted", user.LowerName)
- require.EqualValues(t, "❤‿❤", user.FullName)
+ require.Equal(t, "gusted@example.org", user.Email)
+ require.Equal(t, "Gusted", user.Name)
+ require.Equal(t, "gusted", user.LowerName)
+ require.Equal(t, "❤‿❤", user.FullName)
require.False(t, user.IsAdmin)
})
}
diff --git a/services/auth/source/db/authenticate.go b/services/auth/source/db/authenticate.go
index 7c18540a10..b1d8eae6ae 100644
--- a/services/auth/source/db/authenticate.go
+++ b/services/auth/source/db/authenticate.go
@@ -50,7 +50,7 @@ func Authenticate(ctx context.Context, user *user_model.User, login, password st
if !user.IsPasswordSet() {
return nil, ErrUserPasswordNotSet{UID: user.ID, Name: user.Name}
- } else if !user.ValidatePassword(password) {
+ } else if !user.ValidatePassword(ctx, password) {
return nil, ErrUserPasswordInvalid{UID: user.ID, Name: user.Name}
}
diff --git a/services/auth/source/oauth2/jwtsigningkey_test.go b/services/auth/source/oauth2/jwtsigningkey_test.go
index 7cf2833696..9b07b022df 100644
--- a/services/auth/source/oauth2/jwtsigningkey_test.go
+++ b/services/auth/source/oauth2/jwtsigningkey_test.go
@@ -30,7 +30,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
block, _ := pem.Decode(fileContent)
assert.NotNil(t, block)
- assert.EqualValues(t, "PRIVATE KEY", block.Type)
+ assert.Equal(t, "PRIVATE KEY", block.Type)
parsedKey, err := x509.ParsePKCS8PrivateKey(block.Bytes)
require.NoError(t, err)
@@ -44,14 +44,14 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
- assert.EqualValues(t, 2048, rsaPrivateKey.N.BitLen())
+ assert.Equal(t, 2048, rsaPrivateKey.N.BitLen())
t.Run("Load key with differ specified algorithm", func(t *testing.T) {
defer test.MockVariableValue(&setting.OAuth2.JWTSigningAlgorithm, "EdDSA")()
parsedKey := loadKey(t)
rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
- assert.EqualValues(t, 2048, rsaPrivateKey.N.BitLen())
+ assert.Equal(t, 2048, rsaPrivateKey.N.BitLen())
})
})
@@ -62,7 +62,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
- assert.EqualValues(t, 3072, rsaPrivateKey.N.BitLen())
+ assert.Equal(t, 3072, rsaPrivateKey.N.BitLen())
})
t.Run("RSA-4096", func(t *testing.T) {
@@ -72,7 +72,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
rsaPrivateKey := parsedKey.(*rsa.PrivateKey)
- assert.EqualValues(t, 4096, rsaPrivateKey.N.BitLen())
+ assert.Equal(t, 4096, rsaPrivateKey.N.BitLen())
})
t.Run("ECDSA-256", func(t *testing.T) {
@@ -82,7 +82,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
- assert.EqualValues(t, 256, ecdsaPrivateKey.Params().BitSize)
+ assert.Equal(t, 256, ecdsaPrivateKey.Params().BitSize)
})
t.Run("ECDSA-384", func(t *testing.T) {
@@ -92,7 +92,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
- assert.EqualValues(t, 384, ecdsaPrivateKey.Params().BitSize)
+ assert.Equal(t, 384, ecdsaPrivateKey.Params().BitSize)
})
t.Run("ECDSA-512", func(t *testing.T) {
@@ -102,7 +102,7 @@ func TestLoadOrCreateAsymmetricKey(t *testing.T) {
parsedKey := loadKey(t)
ecdsaPrivateKey := parsedKey.(*ecdsa.PrivateKey)
- assert.EqualValues(t, 521, ecdsaPrivateKey.Params().BitSize)
+ assert.Equal(t, 521, ecdsaPrivateKey.Params().BitSize)
})
t.Run("EdDSA", func(t *testing.T) {
diff --git a/services/auth/source/oauth2/token.go b/services/auth/source/oauth2/token.go
index fba1fd8a01..b060b6b746 100644
--- a/services/auth/source/oauth2/token.go
+++ b/services/auth/source/oauth2/token.go
@@ -4,6 +4,7 @@
package oauth2
import (
+ "errors"
"fmt"
"time"
@@ -51,12 +52,12 @@ func ParseToken(jwtToken string, signingKey JWTSigningKey) (*Token, error) {
return nil, err
}
if !parsedToken.Valid {
- return nil, fmt.Errorf("invalid token")
+ return nil, errors.New("invalid token")
}
var token *Token
var ok bool
if token, ok = parsedToken.Claims.(*Token); !ok || !parsedToken.Valid {
- return nil, fmt.Errorf("invalid token")
+ return nil, errors.New("invalid token")
}
return token, nil
}
diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go
index 51a14edd9a..cbfe3bd54e 100644
--- a/services/automerge/automerge.go
+++ b/services/automerge/automerge.go
@@ -32,7 +32,7 @@ func Init() error {
shared_automerge.PRAutoMergeQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_auto_merge", handler)
if shared_automerge.PRAutoMergeQueue == nil {
- return fmt.Errorf("unable to create pr_auto_merge queue")
+ return errors.New("unable to create pr_auto_merge queue")
}
go graceful.GetManager().RunWithCancel(shared_automerge.PRAutoMergeQueue)
return nil
@@ -107,6 +107,7 @@ func handlePullRequestAutoMerge(pullID int64, sha string) {
return
}
if !exists {
+ log.Trace("GetScheduledMergeByPullID found nothing for PR %d", pullID)
return
}
@@ -204,6 +205,10 @@ func handlePullRequestAutoMerge(pullID int64, sha string) {
return
}
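+ // Remove the scheduled auto merge entry before performing the merge; a missing entry is not an error.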
+ if err := pull_model.DeleteScheduledAutoMerge(ctx, pr.ID); err != nil && !db.IsErrNotExist(err) {
+ log.Error("DeleteScheduledAutoMerge[%d]: %v", pr.ID, err)
+ }
+
if err := pull_service.Merge(ctx, pr, doer, baseGitRepo, scheduledPRM.MergeStyle, "", scheduledPRM.Message, true); err != nil {
log.Error("pull_service.Merge: %v", err)
// FIXME: if merge failed, we should display some error message to the pull request page.
diff --git a/services/context/api.go b/services/context/api.go
index 37f0e0f559..e9f67c720d 100644
--- a/services/context/api.go
+++ b/services/context/api.go
@@ -6,6 +6,7 @@ package context
import (
"context"
+ "errors"
"fmt"
"net/http"
"net/url"
@@ -186,7 +187,7 @@ func (ctx *APIContext) Error(status int, title string, obj any) {
if status == http.StatusInternalServerError {
log.ErrorWithSkip(1, "%s: %s", title, message)
- if setting.IsProd && !(ctx.Doer != nil && ctx.Doer.IsAdmin) {
+ if setting.IsProd && (ctx.Doer == nil || !ctx.Doer.IsAdmin) {
message = ""
}
}
@@ -285,8 +286,8 @@ func APIContexter() func(http.Handler) http.Handler {
}
defer baseCleanUp()
- ctx.Base.AppendContextValue(apiContextKey, ctx)
- ctx.Base.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
+ ctx.AppendContextValue(apiContextKey, ctx)
+ ctx.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
// If request sends files, parse them here otherwise the Query() can't be parsed and the CsrfToken will be invalid.
if ctx.Req.Method == "POST" && strings.Contains(ctx.Req.Header.Get("Content-Type"), "multipart/form-data") {
@@ -334,7 +335,7 @@ func (ctx *APIContext) NotFound(objs ...any) {
func ReferencesGitRepo(allowEmpty ...bool) func(ctx *APIContext) (cancel context.CancelFunc) {
return func(ctx *APIContext) (cancel context.CancelFunc) {
// Empty repository does not have reference information.
- if ctx.Repo.Repository.IsEmpty && !(len(allowEmpty) != 0 && allowEmpty[0]) {
+ if ctx.Repo.Repository.IsEmpty && (len(allowEmpty) == 0 || !allowEmpty[0]) {
return nil
}
@@ -365,12 +366,12 @@ func RepoRefForAPI(next http.Handler) http.Handler {
ctx := GetAPIContext(req)
if ctx.Repo.Repository.IsEmpty {
- ctx.NotFound(fmt.Errorf("repository is empty"))
+ ctx.NotFound(errors.New("repository is empty"))
return
}
if ctx.Repo.GitRepo == nil {
- ctx.InternalServerError(fmt.Errorf("no open git repo"))
+ ctx.InternalServerError(errors.New("no open git repo"))
return
}
diff --git a/services/context/api_test.go b/services/context/api_test.go
index 90e4d5ec65..4bc89939ca 100644
--- a/services/context/api_test.go
+++ b/services/context/api_test.go
@@ -9,13 +9,14 @@ import (
"testing"
"forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestGenAPILinks(t *testing.T) {
- setting.AppURL = "http://localhost:3000/"
+ defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/")()
kases := map[string][]string{
"api/v1/repos/jerrykan/example-repo/issues?state=all": {
`; rel="next"`,
@@ -46,6 +47,6 @@ func TestGenAPILinks(t *testing.T) {
links := genAPILinks(u, 100, 20, curPage)
- assert.EqualValues(t, links, response)
+ assert.Equal(t, links, response)
}
}
diff --git a/services/context/base.go b/services/context/base.go
index 0275ea8a99..dc3d226bb0 100644
--- a/services/context/base.go
+++ b/services/context/base.go
@@ -250,7 +250,7 @@ func (b *Base) PlainText(status int, text string) {
// Redirect redirects the request
func (b *Base) Redirect(location string, status ...int) {
code := http.StatusSeeOther
- if len(status) == 1 {
+ if len(status) == 1 && status[0] > 0 {
code = status[0]
}
diff --git a/services/context/base_test.go b/services/context/base_test.go
index 868ac00f8b..9e058d8f24 100644
--- a/services/context/base_test.go
+++ b/services/context/base_test.go
@@ -9,11 +9,13 @@ import (
"testing"
"forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
"github.com/stretchr/testify/assert"
)
func TestRedirect(t *testing.T) {
+ defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/")()
req, _ := http.NewRequest("GET", "/", nil)
cases := []struct {
@@ -34,6 +36,7 @@ func TestRedirect(t *testing.T) {
cleanup()
has := resp.Header().Get("Set-Cookie") == "i_like_gitea=dummy"
assert.Equal(t, c.keep, has, "url = %q", c.url)
+ assert.Equal(t, http.StatusSeeOther, resp.Code)
}
req, _ = http.NewRequest("GET", "/", nil)
@@ -45,3 +48,24 @@ func TestRedirect(t *testing.T) {
assert.Equal(t, "/other", resp.Header().Get("HX-Redirect"))
assert.Equal(t, http.StatusNoContent, resp.Code)
}
+
+func TestRedirectOptionalStatus(t *testing.T) {
+ defer test.MockVariableValue(&setting.AppURL, "http://localhost:3000/")()
+ req, _ := http.NewRequest("GET", "/", nil)
+
+ cases := []struct {
+ expected int
+ actual int
+ }{
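+ // a zero (omitted) status falls back to the default http.StatusSeeOther (303)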
+ {expected: 303},
+ {http.StatusTemporaryRedirect, 307},
+ {http.StatusPermanentRedirect, 308},
+ }
+ for _, c := range cases {
+ resp := httptest.NewRecorder()
+ b, cleanup := NewBaseContext(resp, req)
+ b.Redirect("/", c.actual)
+ cleanup()
+ assert.Equal(t, c.expected, resp.Code)
+ }
+}
diff --git a/services/context/context.go b/services/context/context.go
index 91484c5ba3..1a839773a8 100644
--- a/services/context/context.go
+++ b/services/context/context.go
@@ -100,7 +100,7 @@ func GetValidateContext(req *http.Request) (ctx *ValidateContext) {
func NewTemplateContextForWeb(ctx *Context) TemplateContext {
tmplCtx := NewTemplateContext(ctx)
- tmplCtx["Locale"] = ctx.Base.Locale
+ tmplCtx["Locale"] = ctx.Locale
tmplCtx["AvatarUtils"] = templates.NewAvatarUtils(ctx)
return tmplCtx
}
@@ -121,6 +121,18 @@ func NewWebContext(base *Base, render Render, session session.Store) *Context {
return ctx
}
+func (ctx *Context) AddPluralStringsToPageData(keys []string) {
+ for _, key := range keys {
+ array, fallback := ctx.Locale.TrPluralStringAllForms(key)
+
+ ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)[key] = array
+
+ if fallback != nil {
+ ctx.PageData["PLURALSTRINGS_FALLBACK"].(map[string][]string)[key] = fallback
+ }
+ }
+}
+
// Contexter initializes a classic context for a request.
func Contexter() func(next http.Handler) http.Handler {
rnd := templates.HTMLRenderer()
@@ -151,8 +163,8 @@ func Contexter() func(next http.Handler) http.Handler {
ctx.PageData = map[string]any{}
ctx.Data["PageData"] = ctx.PageData
- ctx.Base.AppendContextValue(WebContextKey, ctx)
- ctx.Base.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
+ ctx.AppendContextValue(WebContextKey, ctx)
+ ctx.AppendContextValueFunc(gitrepo.RepositoryContextKey, func() any { return ctx.Repo.GitRepo })
ctx.Csrf = NewCSRFProtector(csrfOpts)
@@ -208,6 +220,25 @@ func Contexter() func(next http.Handler) http.Handler {
ctx.Data["AllLangs"] = translation.AllLangs()
+ ctx.PageData["PLURAL_RULE_LANG"] = translation.GetPluralRule(ctx.Locale)
+ ctx.PageData["PLURAL_RULE_FALLBACK"] = translation.GetDefaultPluralRule()
+ ctx.PageData["PLURALSTRINGS_LANG"] = map[string][]string{}
+ ctx.PageData["PLURALSTRINGS_FALLBACK"] = map[string][]string{}
+
+ ctx.AddPluralStringsToPageData([]string{"relativetime.mins", "relativetime.hours", "relativetime.days", "relativetime.weeks", "relativetime.months", "relativetime.years"})
+
+ ctx.PageData["DATETIMESTRINGS"] = map[string]string{
+ "FUTURE": ctx.Locale.TrString("relativetime.future"),
+ "NOW": ctx.Locale.TrString("relativetime.now"),
+ }
+ for _, key := range []string{"relativetime.1day", "relativetime.1week", "relativetime.1month", "relativetime.1year", "relativetime.2days", "relativetime.2weeks", "relativetime.2months", "relativetime.2years"} {
+ // These keys are used for special-casing some time words. We only add keys that are actually translated, so that we
+ // can fall back to the generic pluralized time word in the correct language if the special case is untranslated.
+ if ctx.Locale.HasKey(key) {
+ ctx.PageData["DATETIMESTRINGS"].(map[string]string)[key] = ctx.Locale.TrString(key)
+ }
+ }
+
next.ServeHTTP(ctx.Resp, ctx.Req)
})
}
diff --git a/services/context/context_response.go b/services/context/context_response.go
index e20e7dd852..e64f478420 100644
--- a/services/context/context_response.go
+++ b/services/context/context_response.go
@@ -1,4 +1,5 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
@@ -66,7 +67,10 @@ func (ctx *Context) RedirectToFirst(location ...string) string {
return setting.AppSubURL + "/"
}
-const tplStatus500 base.TplName = "status/500"
+const (
+ tplStatus404 base.TplName = "status/404"
+ tplStatus500 base.TplName = "status/500"
+)
// HTML calls Context.HTML and renders the template to HTTP response
func (ctx *Context) HTML(status int, name base.TplName) {
@@ -153,8 +157,8 @@ func (ctx *Context) notFoundInternal(logMsg string, logErr error) {
}
ctx.Data["IsRepo"] = ctx.Repo.Repository != nil
- ctx.Data["Title"] = "Page Not Found"
- ctx.HTML(http.StatusNotFound, base.TplName("status/404"))
+ ctx.Data["Title"] = ctx.Locale.TrString("error.not_found.title")
+ ctx.HTML(http.StatusNotFound, tplStatus404)
}
// ServerError displays a 500 (Internal Server Error) page and prints the given error, if any.
@@ -177,7 +181,6 @@ func (ctx *Context) serverErrorInternal(logMsg string, logErr error) {
}
}
- ctx.Data["Title"] = "Internal Server Error"
ctx.HTML(http.StatusInternalServerError, tplStatus500)
}
diff --git a/services/context/org.go b/services/context/org.go
index 31ad60704f..3ddc40b6b3 100644
--- a/services/context/org.go
+++ b/services/context/org.go
@@ -1,5 +1,6 @@
// Copyright 2014 The Gogs Authors. All rights reserved.
-// Copyright 2020 The Gitea Authors.
+// Copyright 2020 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package context
@@ -165,6 +166,7 @@ func HandleOrgAssignment(ctx *Context, args ...bool) {
ctx.Data["IsOrganizationMember"] = ctx.Org.IsMember
ctx.Data["IsPackageEnabled"] = setting.Packages.Enabled
ctx.Data["IsRepoIndexerEnabled"] = setting.Indexer.RepoIndexerEnabled
+ ctx.Data["IsModerationEnabled"] = setting.Moderation.Enabled
ctx.Data["IsPublicMember"] = func(uid int64) bool {
is, _ := organization.IsPublicMembership(ctx, ctx.Org.Organization.ID, uid)
return is
diff --git a/services/context/package.go b/services/context/package.go
index e597249e2a..b95e02a882 100644
--- a/services/context/package.go
+++ b/services/context/package.go
@@ -158,7 +158,7 @@ func PackageContexter() func(next http.Handler) http.Handler {
// it is still needed when rendering 500 page in a package handler
ctx := NewWebContext(base, renderer, nil)
- ctx.Base.AppendContextValue(WebContextKey, ctx)
+ ctx.AppendContextValue(WebContextKey, ctx)
next.ServeHTTP(ctx.Resp, ctx.Req)
})
}
diff --git a/services/context/private.go b/services/context/private.go
index 3d7ed694f1..94ee31876a 100644
--- a/services/context/private.go
+++ b/services/context/private.go
@@ -67,7 +67,7 @@ func PrivateContexter() func(http.Handler) http.Handler {
base, baseCleanUp := NewBaseContext(w, req)
ctx := &PrivateContext{Base: base}
defer baseCleanUp()
- ctx.Base.AppendContextValue(privateContextKey, ctx)
+ ctx.AppendContextValue(privateContextKey, ctx)
next.ServeHTTP(ctx.Resp, ctx.Req)
})
diff --git a/services/context/quota.go b/services/context/quota.go
index f6e79e1ebe..502a316107 100644
--- a/services/context/quota.go
+++ b/services/context/quota.go
@@ -64,7 +64,7 @@ func QuotaRuleAssignmentAPI() func(ctx *APIContext) {
// ctx.CheckQuota checks whether the user in question is within quota limits (web context)
func (ctx *Context) CheckQuota(subject quota_model.LimitSubject, userID int64, username string) bool {
- ok, err := checkQuota(ctx.Base.originCtx, subject, userID, username, func(userID int64, username string) {
+ ok, err := checkQuota(ctx.originCtx, subject, userID, username, func(userID int64, username string) {
showHTML := false
for _, part := range ctx.Req.Header["Accept"] {
if strings.Contains(part, "text/html") {
@@ -91,7 +91,7 @@ func (ctx *Context) CheckQuota(subject quota_model.LimitSubject, userID int64, u
// ctx.CheckQuota checks whether the user in question is within quota limits (API context)
func (ctx *APIContext) CheckQuota(subject quota_model.LimitSubject, userID int64, username string) bool {
- ok, err := checkQuota(ctx.Base.originCtx, subject, userID, username, func(userID int64, username string) {
+ ok, err := checkQuota(ctx.originCtx, subject, userID, username, func(userID int64, username string) {
ctx.JSON(http.StatusRequestEntityTooLarge, APIQuotaExceeded{
Message: "quota exceeded",
UserID: userID,
diff --git a/services/context/repo.go b/services/context/repo.go
index a1e1cadf6c..c8876d7166 100644
--- a/services/context/repo.go
+++ b/services/context/repo.go
@@ -83,7 +83,7 @@ func (r *Repository) CanEnableEditor(ctx context.Context, user *user_model.User)
// CanCreateBranch returns true if repository is editable and user has proper access level.
func (r *Repository) CanCreateBranch() bool {
- return r.Permission.CanWrite(unit_model.TypeCode) && r.Repository.CanCreateBranch()
+ return r.CanWrite(unit_model.TypeCode) && r.Repository.CanCreateBranch()
}
func (r *Repository) GetObjectFormat() git.ObjectFormat {
@@ -160,12 +160,12 @@ func (r *Repository) CanUseTimetracker(ctx context.Context, issue *issues_model.
// 2. Is the user a contributor, admin, poster or assignee and do the repository policies require this?
isAssigned, _ := issues_model.IsUserAssignedToIssue(ctx, issue, user)
return r.Repository.IsTimetrackerEnabled(ctx) && (!r.Repository.AllowOnlyContributorsToTrackTime(ctx) ||
- r.Permission.CanWriteIssuesOrPulls(issue.IsPull) || issue.IsPoster(user.ID) || isAssigned)
+ r.CanWriteIssuesOrPulls(issue.IsPull) || issue.IsPoster(user.ID) || isAssigned)
}
// CanCreateIssueDependencies returns whether or not a user can create dependencies.
func (r *Repository) CanCreateIssueDependencies(ctx context.Context, user *user_model.User, isPull bool) bool {
- return r.Repository.IsDependenciesEnabled(ctx) && r.Permission.CanWriteIssuesOrPulls(isPull)
+ return r.Repository.IsDependenciesEnabled(ctx) && r.CanWriteIssuesOrPulls(isPull)
}
// GetCommitsCount returns cached commit count for current view
@@ -361,7 +361,9 @@ func RedirectToRepo(ctx *Base, redirectRepoID int64) {
if ctx.Req.URL.RawQuery != "" {
redirectPath += "?" + ctx.Req.URL.RawQuery
}
- ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusTemporaryRedirect)
+ // By default, the git client needs a 301 redirect to follow the new location.
+ // This is not documented in the git documentation, but it is how the git client behaves.
+ ctx.Redirect(path.Join(setting.AppSubURL, redirectPath), http.StatusMovedPermanently)
}
func repoAssignment(ctx *Context, repo *repo_model.Repository) {
@@ -378,7 +380,7 @@ func repoAssignment(ctx *Context, repo *repo_model.Repository) {
}
// Check access.
- if !ctx.Repo.Permission.HasAccess() {
+ if !ctx.Repo.HasAccess() {
if ctx.FormString("go-get") == "1" {
EarlyResponseForGoGetMeta(ctx)
return
@@ -591,6 +593,7 @@ func RepoAssignment(ctx *Context) context.CancelFunc {
ctx.Data["CanWriteIssues"] = ctx.Repo.CanWrite(unit_model.TypeIssues)
ctx.Data["CanWritePulls"] = ctx.Repo.CanWrite(unit_model.TypePullRequests)
ctx.Data["CanWriteActions"] = ctx.Repo.CanWrite(unit_model.TypeActions)
+ ctx.Data["IsModerationEnabled"] = setting.Moderation.Enabled
canSignedUserFork, err := repo_module.CanUserForkRepo(ctx, ctx.Doer, ctx.Repo.Repository)
if err != nil {
@@ -641,7 +644,11 @@ func RepoAssignment(ctx *Context) context.CancelFunc {
ctx.Data["OpenGraphImageURL"] = repo.SummaryCardURL()
ctx.Data["OpenGraphImageWidth"] = cardWidth
ctx.Data["OpenGraphImageHeight"] = cardHeight
- ctx.Data["OpenGraphImageAltText"] = ctx.Tr("repo.summary_card_alt", repo.FullName())
+ if util.IsEmptyString(repo.Description) {
+ ctx.Data["OpenGraphImageAltText"] = ctx.Tr("repo.summary_card_alt", repo.FullName())
+ } else {
+ ctx.Data["OpenGraphImageAltText"] = ctx.Tr("og.repo.summary_card.alt_description", repo.FullName(), repo.Description)
+ }
if repo.IsFork {
RetrieveBaseRepo(ctx, repo)
diff --git a/services/context/upload/upload.go b/services/context/upload/upload.go
index 2fa177e604..e71fc50c1f 100644
--- a/services/context/upload/upload.go
+++ b/services/context/upload/upload.go
@@ -76,14 +76,15 @@ func Verify(buf []byte, fileName, allowedTypesStr string) error {
// AddUploadContext renders template values for dropzone
func AddUploadContext(ctx *context.Context, uploadType string) {
- if uploadType == "release" {
+ switch uploadType {
+ case "release":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/releases/attachments/remove"
ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/releases/attachments"
ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Repository.Release.AllowedTypes, "|", ",")
ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
- } else if uploadType == "comment" {
+ case "comment":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/issues/attachments"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/issues/attachments/remove"
if len(ctx.Params(":index")) > 0 {
@@ -94,7 +95,7 @@ func AddUploadContext(ctx *context.Context, uploadType string) {
ctx.Data["UploadAccepts"] = strings.ReplaceAll(setting.Attachment.AllowedTypes, "|", ",")
ctx.Data["UploadMaxFiles"] = setting.Attachment.MaxFiles
ctx.Data["UploadMaxSize"] = setting.Attachment.MaxSize
- } else if uploadType == "repo" {
+ case "repo":
ctx.Data["UploadUrl"] = ctx.Repo.RepoLink + "/upload-file"
ctx.Data["UploadRemoveUrl"] = ctx.Repo.RepoLink + "/upload-remove"
ctx.Data["UploadLinkUrl"] = ctx.Repo.RepoLink + "/upload-file"
diff --git a/services/contexttest/context_tests.go b/services/contexttest/context_tests.go
index ebab04f620..a4e674a896 100644
--- a/services/contexttest/context_tests.go
+++ b/services/contexttest/context_tests.go
@@ -68,7 +68,7 @@ func MockContext(t *testing.T, reqPath string, opts ...MockContextOption) (*cont
ctx.PageData = map[string]any{}
ctx.Data["PageStartTime"] = time.Now()
chiCtx := chi.NewRouteContext()
- ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ ctx.AppendContextValue(chi.RouteCtxKey, chiCtx)
return ctx, resp
}
@@ -83,7 +83,7 @@ func MockAPIContext(t *testing.T, reqPath string) (*context.APIContext, *httptes
_ = baseCleanUp // during test, it doesn't need to do clean up. TODO: this can be improved later
chiCtx := chi.NewRouteContext()
- ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ ctx.AppendContextValue(chi.RouteCtxKey, chiCtx)
return ctx, resp
}
@@ -96,7 +96,7 @@ func MockPrivateContext(t *testing.T, reqPath string) (*context.PrivateContext,
ctx := &context.PrivateContext{Base: base}
_ = baseCleanUp // during test, it doesn't need to do clean up. TODO: this can be improved later
chiCtx := chi.NewRouteContext()
- ctx.Base.AppendContextValue(chi.RouteCtxKey, chiCtx)
+ ctx.AppendContextValue(chi.RouteCtxKey, chiCtx)
return ctx, resp
}
diff --git a/services/contexttest/pagedata_test.go b/services/contexttest/pagedata_test.go
new file mode 100644
index 0000000000..0c9319b6db
--- /dev/null
+++ b/services/contexttest/pagedata_test.go
@@ -0,0 +1,63 @@
+// Copyright 2024 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package contexttest
+
+import (
+ "testing"
+
+ "forgejo.org/modules/translation"
+ "forgejo.org/modules/translation/i18n"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func TestPluralStringsForClient(t *testing.T) {
+ mockLocale := translation.MockLocale{}
+ mockLocale.MockTranslations = map[string]string{
+ "relativetime.mins" + i18n.PluralFormSeparator + "one": "%d minute ago",
+ "relativetime.hours" + i18n.PluralFormSeparator + "one": "%d hour ago",
+ "relativetime.days" + i18n.PluralFormSeparator + "one": "%d day ago",
+ "relativetime.weeks" + i18n.PluralFormSeparator + "one": "%d week ago",
+ "relativetime.months" + i18n.PluralFormSeparator + "one": "%d month ago",
+ "relativetime.years" + i18n.PluralFormSeparator + "one": "%d year ago",
+ "relativetime.mins" + i18n.PluralFormSeparator + "other": "%d minutes ago",
+ "relativetime.hours" + i18n.PluralFormSeparator + "other": "%d hours ago",
+ "relativetime.days" + i18n.PluralFormSeparator + "other": "%d days ago",
+ "relativetime.weeks" + i18n.PluralFormSeparator + "other": "%d weeks ago",
+ "relativetime.months" + i18n.PluralFormSeparator + "other": "%d months ago",
+ "relativetime.years" + i18n.PluralFormSeparator + "other": "%d years ago",
+ }
+
+ ctx, _ := MockContext(t, "/")
+ ctx.Locale = mockLocale
+ assert.True(t, ctx.Locale.HasKey("relativetime.mins"))
+ assert.True(t, ctx.Locale.HasKey("relativetime.weeks"))
+ assert.Equal(t, "%d minutes ago", ctx.Locale.TrString("relativetime.mins"+i18n.PluralFormSeparator+"other"))
+ assert.Equal(t, "%d week ago", ctx.Locale.TrString("relativetime.weeks"+i18n.PluralFormSeparator+"one"))
+
+ assert.Empty(t, ctx.PageData)
+ ctx.PageData["PLURALSTRINGS_LANG"] = map[string][]string{}
+ assert.Empty(t, ctx.PageData["PLURALSTRINGS_LANG"])
+
+ ctx.AddPluralStringsToPageData([]string{"relativetime.mins", "relativetime.hours"})
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"], 2)
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"], 2)
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.hours"], 2)
+ assert.Equal(t, "%d minute ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"][0])
+ assert.Equal(t, "%d minutes ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"][1])
+ assert.Equal(t, "%d hour ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.hours"][0])
+ assert.Equal(t, "%d hours ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.hours"][1])
+
+ ctx.AddPluralStringsToPageData([]string{"relativetime.years", "relativetime.days"})
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"], 4)
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"], 2)
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.days"], 2)
+ assert.Len(t, ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.years"], 2)
+ assert.Equal(t, "%d minute ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"][0])
+ assert.Equal(t, "%d minutes ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.mins"][1])
+ assert.Equal(t, "%d day ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.days"][0])
+ assert.Equal(t, "%d days ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.days"][1])
+ assert.Equal(t, "%d year ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.years"][0])
+ assert.Equal(t, "%d years ago", ctx.PageData["PLURALSTRINGS_LANG"].(map[string][]string)["relativetime.years"][1])
+}
diff --git a/services/convert/action.go b/services/convert/action.go
new file mode 100644
index 0000000000..703c1f1261
--- /dev/null
+++ b/services/convert/action.go
@@ -0,0 +1,49 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package convert
+
+import (
+ "context"
+
+ actions_model "forgejo.org/models/actions"
+ access_model "forgejo.org/models/perm/access"
+ user_model "forgejo.org/models/user"
+ api "forgejo.org/modules/structs"
+)
+
+// ToActionRun converts an actions_model.ActionRun to an api.ActionRun.
+// The run must have all attributes loaded.
+func ToActionRun(ctx context.Context, run *actions_model.ActionRun, doer *user_model.User) *api.ActionRun {
+ if run == nil {
+ return nil
+ }
+
+ permissionInRepo, _ := access_model.GetUserRepoPermission(ctx, run.Repo, doer)
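+	// Any error from the permission lookup is ignored; ToRepo then receives the zero (no-access) permission value.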
+
+ return &api.ActionRun{
+ ID: run.ID,
+ Title: run.Title,
+ Repo: ToRepo(ctx, run.Repo, permissionInRepo),
+ WorkflowID: run.WorkflowID,
+ Index: run.Index,
+ TriggerUser: ToUser(ctx, run.TriggerUser, doer),
+ ScheduleID: run.ScheduleID,
+ PrettyRef: run.PrettyRef(),
+ IsRefDeleted: run.IsRefDeleted,
+ CommitSHA: run.CommitSHA,
+ IsForkPullRequest: run.IsForkPullRequest,
+ NeedApproval: run.NeedApproval,
+ ApprovedBy: run.ApprovedBy,
+ Event: run.Event.Event(),
+ EventPayload: run.EventPayload,
+ TriggerEvent: run.TriggerEvent,
+ Status: run.Status.String(),
+ Started: run.Started.AsTime(),
+ Stopped: run.Stopped.AsTime(),
+ Created: run.Created.AsTime(),
+ Updated: run.Updated.AsTime(),
+ Duration: run.Duration(),
+ HTMLURL: run.HTMLURL(),
+ }
+}
diff --git a/services/convert/attachment.go b/services/convert/attachment.go
index 6617aac906..74ae7c509c 100644
--- a/services/convert/attachment.go
+++ b/services/convert/attachment.go
@@ -4,6 +4,9 @@
package convert
import (
+ "mime"
+ "path/filepath"
+
repo_model "forgejo.org/models/repo"
api "forgejo.org/modules/structs"
)
@@ -20,9 +23,13 @@ func APIAssetDownloadURL(repo *repo_model.Repository, attach *repo_model.Attachm
return attach.DownloadURL()
}
-// ToAttachment converts models.Attachment to api.Attachment for API usage
-func ToAttachment(repo *repo_model.Repository, a *repo_model.Attachment) *api.Attachment {
- return toAttachment(repo, a, WebAssetDownloadURL)
+// ToWebAttachment converts models.Attachment to api.WebAttachment for API usage
+func ToWebAttachment(repo *repo_model.Repository, a *repo_model.Attachment) *api.WebAttachment {
+ attachment := toAttachment(repo, a, WebAssetDownloadURL)
+ return &api.WebAttachment{
+ Attachment: attachment,
+ MimeType: mime.TypeByExtension(filepath.Ext(attachment.Name)),
+ }
}
// ToAPIAttachment converts models.Attachment to api.Attachment for API usage
diff --git a/services/convert/attachment_test.go b/services/convert/attachment_test.go
new file mode 100644
index 0000000000..d7bf0c1ee7
--- /dev/null
+++ b/services/convert/attachment_test.go
@@ -0,0 +1,56 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package convert
+
+import (
+ "fmt"
+ "testing"
+ "time"
+
+ repo_model "forgejo.org/models/repo"
+ "forgejo.org/models/unittest"
+ "forgejo.org/modules/setting"
+ api "forgejo.org/modules/structs"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestToWebAttachment(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ headRepo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 1})
+ attachment := &repo_model.Attachment{
+ ID: 10,
+ UUID: "uuidxxx",
+ RepoID: 1,
+ IssueID: 1,
+ ReleaseID: 0,
+ UploaderID: 0,
+ CommentID: 0,
+ Name: "test.png",
+ DownloadCount: 90,
+ Size: 30,
+ NoAutoTime: false,
+ CreatedUnix: 9342,
+ CustomDownloadURL: "",
+ ExternalURL: "",
+ }
+
+ webAttachment := ToWebAttachment(headRepo, attachment)
+
+ assert.NotNil(t, webAttachment)
+ assert.Equal(t, &api.WebAttachment{
+ Attachment: &api.Attachment{
+ ID: 10,
+ Name: "test.png",
+ Created: time.Unix(9342, 0),
+ DownloadCount: 90,
+ Size: 30,
+ UUID: "uuidxxx",
+ DownloadURL: fmt.Sprintf("%sattachments/uuidxxx", setting.AppURL),
+ Type: "attachment",
+ },
+ MimeType: "image/png",
+ }, webAttachment)
+}
diff --git a/services/convert/git_commit.go b/services/convert/git_commit.go
index e041361737..4603cfac4d 100644
--- a/services/convert/git_commit.go
+++ b/services/convert/git_commit.go
@@ -210,7 +210,7 @@ func ToCommit(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Rep
// Get diff stats for commit
if opts.Stat {
- diff, err := gitdiff.GetDiff(ctx, gitRepo, &gitdiff.DiffOptions{
+ diff, _, err := gitdiff.GetDiffSimple(ctx, gitRepo, &gitdiff.DiffOptions{
AfterCommitID: commit.ID.String(),
})
if err != nil {
diff --git a/services/convert/git_commit_test.go b/services/convert/git_commit_test.go
index 463b93aac3..97dff365e6 100644
--- a/services/convert/git_commit_test.go
+++ b/services/convert/git_commit_test.go
@@ -34,7 +34,7 @@ func TestToCommitMeta(t *testing.T) {
commitMeta := ToCommitMeta(headRepo, tag)
assert.NotNil(t, commitMeta)
- assert.EqualValues(t, &api.CommitMeta{
+ assert.Equal(t, &api.CommitMeta{
SHA: sha1.EmptyObjectID().String(),
URL: util.URLJoin(headRepo.APIURL(), "git/commits", sha1.EmptyObjectID().String()),
Created: time.Unix(0, 0),
diff --git a/services/convert/notification.go b/services/convert/notification.go
index 3a4239e0fe..2a69b62e4b 100644
--- a/services/convert/notification.go
+++ b/services/convert/notification.go
@@ -17,7 +17,7 @@ import (
func ToNotificationThread(ctx context.Context, n *activities_model.Notification) *api.NotificationThread {
result := &api.NotificationThread{
ID: n.ID,
- Unread: !(n.Status == activities_model.NotificationStatusRead || n.Status == activities_model.NotificationStatusPinned),
+ Unread: n.Status != activities_model.NotificationStatusRead && n.Status != activities_model.NotificationStatusPinned,
Pinned: n.Status == activities_model.NotificationStatusPinned,
UpdatedAt: n.UpdatedUnix.AsTime(),
URL: n.APIURL(),
diff --git a/services/convert/pull_review.go b/services/convert/pull_review.go
index 08ccc0e1fc..97be118a83 100644
--- a/services/convert/pull_review.go
+++ b/services/convert/pull_review.go
@@ -66,7 +66,7 @@ func ToPullReviewList(ctx context.Context, rl []*issues_model.Review, doer *user
result := make([]*api.PullReview, 0, len(rl))
for i := range rl {
// show pending reviews only for the user who created them
- if rl[i].Type == issues_model.ReviewTypePending && (doer == nil || !(doer.IsAdmin || doer.ID == rl[i].ReviewerID)) {
+ if rl[i].Type == issues_model.ReviewTypePending && (doer == nil || (!doer.IsAdmin && doer.ID != rl[i].ReviewerID)) {
continue
}
r, err := ToPullReview(ctx, rl[i], doer)
diff --git a/services/convert/pull_test.go b/services/convert/pull_test.go
index 3e4875fc60..c0c69fd9ad 100644
--- a/services/convert/pull_test.go
+++ b/services/convert/pull_test.go
@@ -29,7 +29,7 @@ func TestPullRequest_APIFormat(t *testing.T) {
require.NoError(t, pr.LoadIssue(db.DefaultContext))
apiPullRequest := ToAPIPullRequest(git.DefaultContext, pr, nil)
assert.NotNil(t, apiPullRequest)
- assert.EqualValues(t, &structs.PRBranchInfo{
+ assert.Equal(t, &structs.PRBranchInfo{
Name: "branch1",
Ref: "refs/pull/2/head",
Sha: "4a357436d925b5c974181ff12a994538ddc5a269",
diff --git a/services/convert/release_test.go b/services/convert/release_test.go
index 3abd2ff3ef..1d214f0222 100644
--- a/services/convert/release_test.go
+++ b/services/convert/release_test.go
@@ -24,6 +24,6 @@ func TestRelease_ToRelease(t *testing.T) {
apiRelease := ToAPIRelease(db.DefaultContext, repo1, release1)
assert.NotNil(t, apiRelease)
assert.EqualValues(t, 1, apiRelease.ID)
- assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL)
- assert.EqualValues(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL)
+ assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1", apiRelease.URL)
+ assert.Equal(t, "https://try.gitea.io/api/v1/repos/user2/repo1/releases/1/assets", apiRelease.UploadURL)
}
diff --git a/services/convert/user_test.go b/services/convert/user_test.go
index 01ce8101da..8a42a9d97d 100644
--- a/services/convert/user_test.go
+++ b/services/convert/user_test.go
@@ -31,11 +31,11 @@ func TestUser_ToUser(t *testing.T) {
apiUser = toUser(db.DefaultContext, user1, false, false)
assert.False(t, apiUser.IsAdmin)
- assert.EqualValues(t, api.VisibleTypePublic.String(), apiUser.Visibility)
+ assert.Equal(t, api.VisibleTypePublic.String(), apiUser.Visibility)
user31 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 31, IsAdmin: false, Visibility: api.VisibleTypePrivate})
apiUser = toUser(db.DefaultContext, user31, true, true)
assert.False(t, apiUser.IsAdmin)
- assert.EqualValues(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
+ assert.Equal(t, api.VisibleTypePrivate.String(), apiUser.Visibility)
}
diff --git a/services/convert/utils_test.go b/services/convert/utils_test.go
index b464d8bb68..6c3bf7d938 100644
--- a/services/convert/utils_test.go
+++ b/services/convert/utils_test.go
@@ -10,10 +10,10 @@ import (
)
func TestToCorrectPageSize(t *testing.T) {
- assert.EqualValues(t, 30, ToCorrectPageSize(0))
- assert.EqualValues(t, 30, ToCorrectPageSize(-10))
- assert.EqualValues(t, 20, ToCorrectPageSize(20))
- assert.EqualValues(t, 50, ToCorrectPageSize(100))
+ assert.Equal(t, 30, ToCorrectPageSize(0))
+ assert.Equal(t, 30, ToCorrectPageSize(-10))
+ assert.Equal(t, 20, ToCorrectPageSize(20))
+ assert.Equal(t, 50, ToCorrectPageSize(100))
}
func TestToGitServiceType(t *testing.T) {
diff --git a/services/cron/setting.go b/services/cron/setting.go
index 7fd4c4e1d8..2db6c15370 100644
--- a/services/cron/setting.go
+++ b/services/cron/setting.go
@@ -46,6 +46,13 @@ type CleanupHookTaskConfig struct {
NumberToKeep int
}
+// CleanupOfflineRunnersConfig represents a cron task with settings to clean up offline runners
+type CleanupOfflineRunnersConfig struct {
+ BaseConfig
+ OlderThan time.Duration
+ GlobalScopeOnly bool
+}
+
// GetSchedule returns the schedule for the base config
func (b *BaseConfig) GetSchedule() string {
return b.Schedule
diff --git a/services/cron/tasks_actions.go b/services/cron/tasks_actions.go
index a7fd3cd0bc..2cd484fa69 100644
--- a/services/cron/tasks_actions.go
+++ b/services/cron/tasks_actions.go
@@ -5,6 +5,7 @@ package cron
import (
"context"
+ "time"
user_model "forgejo.org/models/user"
"forgejo.org/modules/setting"
@@ -20,6 +21,7 @@ func initActionsTasks() {
registerCancelAbandonedJobs()
registerScheduleTasks()
registerActionsCleanup()
+ registerOfflineRunnersCleanup()
}
func registerStopZombieTasks() {
@@ -74,3 +76,22 @@ func registerActionsCleanup() {
return actions_service.Cleanup(ctx)
})
}
+
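+// registerOfflineRunnersCleanup registers a cron task, disabled by default, that deletes action runners
+// which have been offline for longer than the configured duration.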
+func registerOfflineRunnersCleanup() {
+ RegisterTaskFatal("cleanup_offline_runners", &CleanupOfflineRunnersConfig{
+ BaseConfig: BaseConfig{
+ Enabled: false,
+ RunAtStart: false,
+ Schedule: "@midnight",
+ },
+ GlobalScopeOnly: true,
+ OlderThan: time.Hour * 24,
+ }, func(ctx context.Context, _ *user_model.User, cfg Config) error {
+ c := cfg.(*CleanupOfflineRunnersConfig)
+ return actions_service.CleanupOfflineRunners(
+ ctx,
+ c.OlderThan,
+ c.GlobalScopeOnly,
+ )
+ })
+}
diff --git a/services/doctor/authorizedkeys.go b/services/doctor/authorizedkeys.go
index 04a3680ff5..465a3fc7c0 100644
--- a/services/doctor/authorizedkeys.go
+++ b/services/doctor/authorizedkeys.go
@@ -7,6 +7,7 @@ import (
"bufio"
"bytes"
"context"
+ "errors"
"fmt"
"os"
"path/filepath"
@@ -77,7 +78,7 @@ func checkAuthorizedKeys(ctx context.Context, logger log.Logger, autofix bool) e
fPath,
"forgejo admin regenerate keys",
"forgejo doctor check --run authorized-keys --fix")
- return fmt.Errorf(`authorized_keys is out of date and should be regenerated with "forgejo admin regenerate keys" or "forgejo doctor check --run authorized-keys --fix"`)
+ return errors.New(`authorized_keys is out of date and should be regenerated with "forgejo admin regenerate keys" or "forgejo doctor check --run authorized-keys --fix"`)
}
logger.Warn("authorized_keys is out of date. Attempting rewrite...")
err = asymkey_model.RewriteAllPublicKeys(ctx)
diff --git a/services/doctor/dbconsistency.go b/services/doctor/dbconsistency.go
index 6fcbd90940..6fe4c9c5e6 100644
--- a/services/doctor/dbconsistency.go
+++ b/services/doctor/dbconsistency.go
@@ -78,7 +78,14 @@ func genericOrphanCheck(name, subject, refobject, joincond string) consistencyCh
func checkDBConsistency(ctx context.Context, logger log.Logger, autofix bool) error {
// make sure DB version is up-to-date
- if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
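+	// The wrapper unwraps the master *xorm.Engine from the generic db.Engine before running the up-to-date check.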
+ ensureUpToDateWrapper := func(e db.Engine) error {
+ engine, err := db.GetMasterEngine(e)
+ if err != nil {
+ return err
+ }
+ return migrations.EnsureUpToDate(engine)
+ }
+ if err := db.InitEngineWithMigration(ctx, ensureUpToDateWrapper); err != nil {
logger.Critical("Model version on the database does not match the current Gitea version. Model consistency will not be checked until the database is upgraded")
return err
}
diff --git a/services/doctor/dbversion.go b/services/doctor/dbversion.go
index 9c02c732e5..c0ff22915d 100644
--- a/services/doctor/dbversion.go
+++ b/services/doctor/dbversion.go
@@ -9,11 +9,15 @@ import (
"forgejo.org/models/db"
"forgejo.org/models/migrations"
"forgejo.org/modules/log"
+
+ "xorm.io/xorm"
)
func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error {
logger.Info("Expected database version: %d", migrations.ExpectedDBVersion())
- if err := db.InitEngineWithMigration(ctx, migrations.EnsureUpToDate); err != nil {
+ if err := db.InitEngineWithMigration(ctx, func(eng db.Engine) error {
+ return migrations.EnsureUpToDate(eng.(*xorm.Engine))
+ }); err != nil {
if !autofix {
logger.Critical("Error: %v during ensure up to date", err)
return err
@@ -21,7 +25,9 @@ func checkDBVersion(ctx context.Context, logger log.Logger, autofix bool) error
logger.Warn("Got Error: %v during ensure up to date", err)
logger.Warn("Attempting to migrate to the latest DB version to fix this.")
- err = db.InitEngineWithMigration(ctx, migrations.Migrate)
+ err = db.InitEngineWithMigration(ctx, func(eng db.Engine) error {
+ return migrations.Migrate(eng.(*xorm.Engine))
+ })
if err != nil {
logger.Critical("Error: %v during migration", err)
}
diff --git a/services/doctor/fix16961_test.go b/services/doctor/fix16961_test.go
index 7a83c808c3..75f9f206ab 100644
--- a/services/doctor/fix16961_test.go
+++ b/services/doctor/fix16961_test.go
@@ -221,7 +221,7 @@ func Test_fixPullRequestsConfig_16961(t *testing.T) {
if gotFixed != tt.wantFixed {
t.Errorf("fixPullRequestsConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
}
- assert.EqualValues(t, &tt.expected, cfg)
+ assert.Equal(t, &tt.expected, cfg)
})
}
}
@@ -265,7 +265,7 @@ func Test_fixIssuesConfig_16961(t *testing.T) {
if gotFixed != tt.wantFixed {
t.Errorf("fixIssuesConfig_16961() = %v, want %v", gotFixed, tt.wantFixed)
}
- assert.EqualValues(t, &tt.expected, cfg)
+ assert.Equal(t, &tt.expected, cfg)
})
}
}
diff --git a/services/doctor/lfs.go b/services/doctor/lfs.go
index fed127de5d..fe858605f4 100644
--- a/services/doctor/lfs.go
+++ b/services/doctor/lfs.go
@@ -5,7 +5,7 @@ package doctor
import (
"context"
- "fmt"
+ "errors"
"time"
"forgejo.org/modules/log"
@@ -27,7 +27,7 @@ func init() {
func garbageCollectLFSCheck(ctx context.Context, logger log.Logger, autofix bool) error {
if !setting.LFS.StartServer {
- return fmt.Errorf("LFS support is disabled")
+ return errors.New("LFS support is disabled")
}
if err := repository.GarbageCollectLFSMetaObjects(ctx, repository.GarbageCollectLFSMetaObjectsOptions{
diff --git a/services/externalaccount/link.go b/services/externalaccount/link.go
index f5d29b5ce5..5672313181 100644
--- a/services/externalaccount/link.go
+++ b/services/externalaccount/link.go
@@ -5,7 +5,7 @@ package externalaccount
import (
"context"
- "fmt"
+ "errors"
user_model "forgejo.org/models/user"
@@ -23,7 +23,7 @@ type Store interface {
func LinkAccountFromStore(ctx context.Context, store Store, user *user_model.User) error {
gothUser := store.Get("linkAccountGothUser")
if gothUser == nil {
- return fmt.Errorf("not in LinkAccount session")
+ return errors.New("not in LinkAccount session")
}
return LinkAccountToUser(ctx, user, gothUser.(goth.User))
diff --git a/services/f3/driver/asset.go b/services/f3/driver/asset.go
deleted file mode 100644
index c9d2ecdf2f..0000000000
--- a/services/f3/driver/asset.go
+++ /dev/null
@@ -1,172 +0,0 @@
-// Copyright Earl Warren
-// Copyright Loïc Dachary
-// SPDX-License-Identifier: MIT
-
-package driver
-
-import (
- "context"
- "crypto/sha256"
- "encoding/hex"
- "fmt"
- "io"
- "os"
-
- "forgejo.org/models/db"
- repo_model "forgejo.org/models/repo"
- user_model "forgejo.org/models/user"
- "forgejo.org/modules/storage"
- "forgejo.org/modules/timeutil"
- "forgejo.org/services/attachment"
-
- "code.forgejo.org/f3/gof3/v3/f3"
- f3_id "code.forgejo.org/f3/gof3/v3/id"
- f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
- "code.forgejo.org/f3/gof3/v3/tree/generic"
- f3_util "code.forgejo.org/f3/gof3/v3/util"
- "github.com/google/uuid"
-)
-
-var _ f3_tree.ForgeDriverInterface = &issue{}
-
-type asset struct {
- common
-
- forgejoAsset *repo_model.Attachment
- sha string
- contentType string
- downloadFunc f3.DownloadFuncType
-}
-
-func (o *asset) SetNative(asset any) {
- o.forgejoAsset = asset.(*repo_model.Attachment)
-}
-
-func (o *asset) GetNativeID() string {
- return fmt.Sprintf("%d", o.forgejoAsset.ID)
-}
-
-func (o *asset) NewFormat() f3.Interface {
- node := o.GetNode()
- return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
-}
-
-func (o *asset) ToFormat() f3.Interface {
- if o.forgejoAsset == nil {
- return o.NewFormat()
- }
-
- return &f3.ReleaseAsset{
- Common: f3.NewCommon(o.GetNativeID()),
- Name: o.forgejoAsset.Name,
- ContentType: o.contentType,
- Size: o.forgejoAsset.Size,
- DownloadCount: o.forgejoAsset.DownloadCount,
- Created: o.forgejoAsset.CreatedUnix.AsTime(),
- SHA256: o.sha,
- DownloadURL: o.forgejoAsset.DownloadURL(),
- DownloadFunc: o.downloadFunc,
- }
-}
-
-func (o *asset) FromFormat(content f3.Interface) {
- asset := content.(*f3.ReleaseAsset)
- o.forgejoAsset = &repo_model.Attachment{
- ID: f3_util.ParseInt(asset.GetID()),
- Name: asset.Name,
- Size: asset.Size,
- DownloadCount: asset.DownloadCount,
- CreatedUnix: timeutil.TimeStamp(asset.Created.Unix()),
- CustomDownloadURL: asset.DownloadURL,
- }
- o.contentType = asset.ContentType
- o.sha = asset.SHA256
- o.downloadFunc = asset.DownloadFunc
-}
-
-func (o *asset) Get(ctx context.Context) bool {
- node := o.GetNode()
- o.Trace("%s", node.GetID())
-
- id := node.GetID().Int64()
-
- asset, err := repo_model.GetAttachmentByID(ctx, id)
- if repo_model.IsErrAttachmentNotExist(err) {
- return false
- }
- if err != nil {
- panic(fmt.Errorf("asset %v %w", id, err))
- }
-
- o.forgejoAsset = asset
-
- path := o.forgejoAsset.RelativePath()
-
- {
- f, err := storage.Attachments.Open(path)
- if err != nil {
- panic(err)
- }
- hasher := sha256.New()
- if _, err := io.Copy(hasher, f); err != nil {
- panic(fmt.Errorf("io.Copy to hasher: %v", err))
- }
- o.sha = hex.EncodeToString(hasher.Sum(nil))
- }
-
- o.downloadFunc = func() io.ReadCloser {
- o.Trace("download %s from copy stored in temporary file %s", o.forgejoAsset.DownloadURL, path)
- f, err := os.Open(path)
- if err != nil {
- panic(err)
- }
- return f
- }
- return true
-}
-
-func (o *asset) Patch(ctx context.Context) {
- o.Trace("%d", o.forgejoAsset.ID)
- if _, err := db.GetEngine(ctx).ID(o.forgejoAsset.ID).Cols("name").Update(o.forgejoAsset); err != nil {
- panic(fmt.Errorf("UpdateAssetCols: %v %v", o.forgejoAsset, err))
- }
-}
-
-func (o *asset) Put(ctx context.Context) f3_id.NodeID {
- node := o.GetNode()
- o.Trace("%s", node.GetID())
-
- uploader, err := user_model.GetAdminUser(ctx)
- if err != nil {
- panic(fmt.Errorf("GetAdminUser %w", err))
- }
-
- o.forgejoAsset.UploaderID = uploader.ID
- o.forgejoAsset.RepoID = f3_tree.GetProjectID(o.GetNode())
- o.forgejoAsset.ReleaseID = f3_tree.GetReleaseID(o.GetNode())
- o.forgejoAsset.UUID = uuid.New().String()
-
- download := o.downloadFunc()
- defer download.Close()
-
- _, err = attachment.NewAttachment(ctx, o.forgejoAsset, download, o.forgejoAsset.Size)
- if err != nil {
- panic(err)
- }
-
- o.Trace("asset created %d", o.forgejoAsset.ID)
- return f3_id.NewNodeID(o.forgejoAsset.ID)
-}
-
-func (o *asset) Delete(ctx context.Context) {
- node := o.GetNode()
- o.Trace("%s", node.GetID())
-
- if err := repo_model.DeleteAttachment(ctx, o.forgejoAsset, true); err != nil {
- panic(err)
- }
-}
-
-func newAsset() generic.NodeDriverInterface {
- return &asset{}
-}
diff --git a/services/f3/driver/assets.go b/services/f3/driver/assets.go
deleted file mode 100644
index 106d5029f3..0000000000
--- a/services/f3/driver/assets.go
+++ /dev/null
@@ -1,42 +0,0 @@
-// Copyright Earl Warren
-// Copyright Loïc Dachary
-// SPDX-License-Identifier: MIT
-
-package driver
-
-import (
- "context"
- "fmt"
-
- repo_model "forgejo.org/models/repo"
-
- f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
- "code.forgejo.org/f3/gof3/v3/tree/generic"
-)
-
-type assets struct {
- container
-}
-
-func (o *assets) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
- if page > 1 {
- return generic.NewChildrenSlice(0)
- }
-
- releaseID := f3_tree.GetReleaseID(o.GetNode())
-
- release, err := repo_model.GetReleaseByID(ctx, releaseID)
- if err != nil {
- panic(fmt.Errorf("GetReleaseByID %v %w", releaseID, err))
- }
-
- if err := release.LoadAttributes(ctx); err != nil {
- panic(fmt.Errorf("error while listing assets: %v", err))
- }
-
- return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(release.Attachments...)...)
-}
-
-func newAssets() generic.NodeDriverInterface {
- return &assets{}
-}
diff --git a/services/f3/driver/attachment.go b/services/f3/driver/attachment.go
new file mode 100644
index 0000000000..64c188d6e0
--- /dev/null
+++ b/services/f3/driver/attachment.go
@@ -0,0 +1,185 @@
+// Copyright Earl Warren
+// Copyright Loïc Dachary
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "crypto/sha256"
+ "encoding/hex"
+ "fmt"
+ "io"
+ "os"
+
+ "forgejo.org/models/db"
+ repo_model "forgejo.org/models/repo"
+ user_model "forgejo.org/models/user"
+ "forgejo.org/modules/storage"
+ "forgejo.org/modules/timeutil"
+ forgejo_attachment "forgejo.org/services/attachment"
+
+ "code.forgejo.org/f3/gof3/v3/f3"
+ f3_id "code.forgejo.org/f3/gof3/v3/id"
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+ f3_util "code.forgejo.org/f3/gof3/v3/util"
+ "github.com/google/uuid"
+)
+
+var _ f3_tree.ForgeDriverInterface = &issue{}
+
+type attachment struct {
+ common
+
+ forgejoAttachment *repo_model.Attachment
+ sha string
+ contentType string
+ downloadFunc f3.DownloadFuncType
+}
+
+func (o *attachment) SetNative(attachment any) {
+ o.forgejoAttachment = attachment.(*repo_model.Attachment)
+}
+
+func (o *attachment) GetNativeID() string {
+ return fmt.Sprintf("%d", o.forgejoAttachment.ID)
+}
+
+func (o *attachment) NewFormat() f3.Interface {
+ node := o.GetNode()
+ return node.GetTree().(f3_tree.TreeInterface).NewFormat(node.GetKind())
+}
+
+func (o *attachment) ToFormat() f3.Interface {
+ if o.forgejoAttachment == nil {
+ return o.NewFormat()
+ }
+
+ return &f3.Attachment{
+ Common: f3.NewCommon(o.GetNativeID()),
+ Name: o.forgejoAttachment.Name,
+ ContentType: o.contentType,
+ Size: o.forgejoAttachment.Size,
+ DownloadCount: o.forgejoAttachment.DownloadCount,
+ Created: o.forgejoAttachment.CreatedUnix.AsTime(),
+ SHA256: o.sha,
+ DownloadURL: o.forgejoAttachment.DownloadURL(),
+ DownloadFunc: o.downloadFunc,
+ }
+}
+
+func (o *attachment) FromFormat(content f3.Interface) {
+ attachment := content.(*f3.Attachment)
+ o.forgejoAttachment = &repo_model.Attachment{
+ ID: f3_util.ParseInt(attachment.GetID()),
+ Name: attachment.Name,
+ Size: attachment.Size,
+ DownloadCount: attachment.DownloadCount,
+ CreatedUnix: timeutil.TimeStamp(attachment.Created.Unix()),
+ CustomDownloadURL: attachment.DownloadURL,
+ }
+ o.contentType = attachment.ContentType
+ o.sha = attachment.SHA256
+ o.downloadFunc = attachment.DownloadFunc
+}
+
+func (o *attachment) Get(ctx context.Context) bool {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ id := node.GetID().Int64()
+
+ attachment, err := repo_model.GetAttachmentByID(ctx, id)
+ if repo_model.IsErrAttachmentNotExist(err) {
+ return false
+ }
+ if err != nil {
+ panic(fmt.Errorf("attachment %v %w", id, err))
+ }
+
+ o.forgejoAttachment = attachment
+
+ path := o.forgejoAttachment.RelativePath()
+
+ {
+ f, err := storage.Attachments.Open(path)
+ if err != nil {
+ panic(err)
+ }
+ hasher := sha256.New()
+ if _, err := io.Copy(hasher, f); err != nil {
+ panic(fmt.Errorf("io.Copy to hasher: %v", err))
+ }
+ o.sha = hex.EncodeToString(hasher.Sum(nil))
+ }
+
+ o.downloadFunc = func() io.ReadCloser {
+		o.Trace("download %s from copy stored in temporary file %s", o.forgejoAttachment.DownloadURL(), path)
+ f, err := os.Open(path)
+ if err != nil {
+ panic(err)
+ }
+ return f
+ }
+ return true
+}
+
+func (o *attachment) Patch(ctx context.Context) {
+ o.Trace("%d", o.forgejoAttachment.ID)
+ if _, err := db.GetEngine(ctx).ID(o.forgejoAttachment.ID).Cols("name").Update(o.forgejoAttachment); err != nil {
+ panic(fmt.Errorf("UpdateAttachmentCols: %v %v", o.forgejoAttachment, err))
+ }
+}
+
+func (o *attachment) Put(ctx context.Context) f3_id.NodeID {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ uploader, err := user_model.GetAdminUser(ctx)
+ if err != nil {
+ panic(fmt.Errorf("GetAdminUser %w", err))
+ }
+
+ attachable := f3_tree.GetAttachable(o.GetNode())
+ attachableID := f3_tree.GetAttachableID(o.GetNode())
+
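+	// Link the new attachment to its parent node: a release, a comment, or an issue/pull request.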
+ switch attachable.GetKind() {
+ case f3_tree.KindRelease:
+ o.forgejoAttachment.ReleaseID = attachableID
+ case f3_tree.KindComment:
+ o.forgejoAttachment.CommentID = attachableID
+ case f3_tree.KindIssue, f3_tree.KindPullRequest:
+ o.forgejoAttachment.IssueID = attachableID
+ default:
+ panic(fmt.Errorf("unexpected type %s", attachable.GetKind()))
+ }
+
+ o.forgejoAttachment.UploaderID = uploader.ID
+ o.forgejoAttachment.RepoID = f3_tree.GetProjectID(o.GetNode())
+ o.forgejoAttachment.UUID = uuid.New().String()
+
+ download := o.downloadFunc()
+ defer download.Close()
+
+ _, err = forgejo_attachment.NewAttachment(ctx, o.forgejoAttachment, download, o.forgejoAttachment.Size)
+ if err != nil {
+ panic(err)
+ }
+
+ o.Trace("attachment created %d", o.forgejoAttachment.ID)
+ return f3_id.NewNodeID(o.forgejoAttachment.ID)
+}
+
+func (o *attachment) Delete(ctx context.Context) {
+ node := o.GetNode()
+ o.Trace("%s", node.GetID())
+
+ if err := repo_model.DeleteAttachment(ctx, o.forgejoAttachment, true); err != nil {
+ panic(err)
+ }
+}
+
+func newAttachment() generic.NodeDriverInterface {
+ return &attachment{}
+}
diff --git a/services/f3/driver/attachments.go b/services/f3/driver/attachments.go
new file mode 100644
index 0000000000..392afda52c
--- /dev/null
+++ b/services/f3/driver/attachments.go
@@ -0,0 +1,79 @@
+// Copyright Earl Warren
+// Copyright Loïc Dachary
+// SPDX-License-Identifier: MIT
+
+package driver
+
+import (
+ "context"
+ "fmt"
+
+ issues_model "forgejo.org/models/issues"
+ repo_model "forgejo.org/models/repo"
+
+ f3_tree "code.forgejo.org/f3/gof3/v3/tree/f3"
+ "code.forgejo.org/f3/gof3/v3/tree/generic"
+)
+
+type attachments struct {
+ container
+}
+
+func (o *attachments) ListPage(ctx context.Context, page int) generic.ChildrenSlice {
+ if page > 1 {
+ return generic.NewChildrenSlice(0)
+ }
+
+ attachable := f3_tree.GetAttachable(o.GetNode())
+ attachableID := f3_tree.GetAttachableID(o.GetNode())
+
+ var attachments []*repo_model.Attachment
+
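+	// Collect the attachments that belong to the parent node: release assets, comment attachments,
+	// or issue/pull request attachments.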
+ switch attachable.GetKind() {
+ case f3_tree.KindRelease:
+ release, err := repo_model.GetReleaseByID(ctx, attachableID)
+ if err != nil {
+ panic(fmt.Errorf("GetReleaseByID %v %w", attachableID, err))
+ }
+
+ if err := release.LoadAttributes(ctx); err != nil {
+ panic(fmt.Errorf("error while listing attachments: %v", err))
+ }
+
+ attachments = release.Attachments
+
+ case f3_tree.KindComment:
+ comment, err := issues_model.GetCommentByID(ctx, attachableID)
+ if err != nil {
+ panic(fmt.Errorf("GetCommentByID %v %w", attachableID, err))
+ }
+
+ if err := comment.LoadAttachments(ctx); err != nil {
+ panic(fmt.Errorf("error while listing attachments: %v", err))
+ }
+
+ attachments = comment.Attachments
+
+ case f3_tree.KindIssue, f3_tree.KindPullRequest:
+ repoID := f3_tree.GetProjectID(o.GetNode())
+ issue, err := issues_model.GetIssueByIndex(ctx, repoID, attachableID)
+ if err != nil {
+			panic(fmt.Errorf("GetIssueByIndex %v %w", attachableID, err))
+ }
+
+ if err := issue.LoadAttachments(ctx); err != nil {
+ panic(fmt.Errorf("error while listing attachments: %v", err))
+ }
+
+ attachments = issue.Attachments
+
+ default:
+ panic(fmt.Errorf("unexpected type %s", attachable.GetKind()))
+ }
+
+ return f3_tree.ConvertListed(ctx, o.GetNode(), f3_tree.ConvertToAny(attachments...)...)
+}
+
+func newAttachments() generic.NodeDriverInterface {
+ return &attachments{}
+}
diff --git a/services/f3/driver/reaction.go b/services/f3/driver/reaction.go
index 74c50b9d13..b959206074 100644
--- a/services/f3/driver/reaction.go
+++ b/services/f3/driver/reaction.go
@@ -89,7 +89,7 @@ func (o *reaction) Patch(ctx context.Context) {
}
func (o *reaction) Put(ctx context.Context) f3_id.NodeID {
- o.Error("%v", o.forgejoReaction.User)
+ o.Trace("%v", o.forgejoReaction.User)
sess := db.GetEngine(ctx)
@@ -110,7 +110,7 @@ func (o *reaction) Put(ctx context.Context) f3_id.NodeID {
panic(fmt.Errorf("unexpected type %v", reactionable.GetKind()))
}
- o.Error("%v", o.forgejoReaction)
+ o.Trace("%v", o.forgejoReaction)
if _, err := sess.Insert(o.forgejoReaction); err != nil {
panic(err)
diff --git a/services/f3/driver/repository.go b/services/f3/driver/repository.go
index e7f4e43723..3cd9aa7f2e 100644
--- a/services/f3/driver/repository.go
+++ b/services/f3/driver/repository.go
@@ -72,7 +72,7 @@ func (o *repository) upsert(ctx context.Context) f3_id.NodeID {
return f3_id.NewNodeID(o.f.Name)
}
-func (o *repository) SetFetchFunc(fetchFunc func(ctx context.Context, destination string, internalRefs []string)) {
+func (o *repository) SetFetchFunc(fetchFunc func(ctx context.Context, destination, internalRef string)) {
o.f.FetchFunc = fetchFunc
}
@@ -93,10 +93,16 @@ func (o *repository) GetRepositoryPushURL() string {
return o.getURL()
}
-func (o *repository) GetRepositoryInternalRefs() []string {
- return []string{}
+func (o *repository) GetRepositoryInternalRef() string {
+ return ""
}
+func (o *repository) GetPullRequestBranch(pr *f3.PullRequestBranch) *f3.PullRequestBranch {
+	panic("GetPullRequestBranch is not implemented")
+}
+func (o *repository) CreatePullRequestBranch(pr *f3.PullRequestBranch) {}
+func (o *repository) DeletePullRequestBranch(pr *f3.PullRequestBranch) {}
+
func newRepository(_ context.Context) generic.NodeDriverInterface {
r := &repository{
f: &f3.Repository{},
diff --git a/services/f3/driver/tree.go b/services/f3/driver/tree.go
index ff927df9d4..fe11b15f6e 100644
--- a/services/f3/driver/tree.go
+++ b/services/f3/driver/tree.go
@@ -49,10 +49,10 @@ func (o *treeDriver) Factory(ctx context.Context, kind f3_kind.Kind) generic.Nod
return newComments()
case f3_tree.KindComment:
return newComment()
- case f3_tree.KindAssets:
- return newAssets()
- case f3_tree.KindAsset:
- return newAsset()
+ case f3_tree.KindAttachments:
+ return newAttachments()
+ case f3_tree.KindAttachment:
+ return newAttachment()
case f3_tree.KindLabels:
return newLabels()
case f3_tree.KindLabel:
diff --git a/services/f3/util/logger_test.go b/services/f3/util/logger_test.go
index 4afd5dd57f..f62d9e2e82 100644
--- a/services/f3/util/logger_test.go
+++ b/services/f3/util/logger_test.go
@@ -23,7 +23,7 @@ func TestF3UtilMessage(t *testing.T) {
actual = fmt.Sprintf(message, args...)
}, nil)
logger.Message("EXPECTED %s", "MESSAGE")
- assert.EqualValues(t, expected, actual)
+ assert.Equal(t, expected, actual)
}
func TestF3UtilLogger(t *testing.T) {
diff --git a/services/federation/federation_service.go b/services/federation/federation_service.go
index 21c7be855b..a3b719d1a7 100644
--- a/services/federation/federation_service.go
+++ b/services/federation/federation_service.go
@@ -1,10 +1,11 @@
-// Copyright 2024 The Forgejo Authors. All rights reserved.
+// Copyright 2024, 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package federation
import (
"context"
+ "errors"
"fmt"
"net/http"
"net/url"
@@ -46,7 +47,7 @@ func ProcessLikeActivity(ctx context.Context, form any, repositoryID int64) (int
return http.StatusInternalServerError, "Wrong FederationHost", err
}
if !activity.IsNewer(federationHost.LatestActivity) {
- return http.StatusNotAcceptable, "Activity out of order.", fmt.Errorf("Activity already processed")
+ return http.StatusNotAcceptable, "Activity out of order.", errors.New("Activity already processed")
}
actorID, err := fm.NewPersonID(actorURI, string(federationHost.NodeInfo.SoftwareName))
if err != nil {
@@ -98,39 +99,47 @@ func ProcessLikeActivity(ctx context.Context, form any, repositoryID int64) (int
}
func CreateFederationHostFromAP(ctx context.Context, actorID fm.ActorID) (*forgefed.FederationHost, error) {
- actionsUser := user.NewActionsUser()
+ actionsUser := user.NewAPServerActor()
clientFactory, err := activitypub.GetClientFactory(ctx)
if err != nil {
return nil, err
}
- client, err := clientFactory.WithKeys(ctx, actionsUser, "no idea where to get key material.")
+
+ client, err := clientFactory.WithKeys(ctx, actionsUser, actionsUser.APActorKeyID())
if err != nil {
return nil, err
}
+
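+	// Discover the remote instance: fetch its well-known nodeinfo document, follow the advertised href,
+	// and record the result as a FederationHost.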
body, err := client.GetBody(actorID.AsWellKnownNodeInfoURI())
if err != nil {
return nil, err
}
+
nodeInfoWellKnown, err := forgefed.NewNodeInfoWellKnown(body)
if err != nil {
return nil, err
}
+
body, err = client.GetBody(nodeInfoWellKnown.Href)
if err != nil {
return nil, err
}
+
nodeInfo, err := forgefed.NewNodeInfo(body)
if err != nil {
return nil, err
}
- result, err := forgefed.NewFederationHost(nodeInfo, actorID.Host)
+
+ result, err := forgefed.NewFederationHost(actorID.Host, nodeInfo, actorID.HostPort, actorID.HostSchema)
if err != nil {
return nil, err
}
+
err = forgefed.CreateFederationHost(ctx, &result)
if err != nil {
return nil, err
}
+
return &result, nil
}
@@ -140,7 +149,7 @@ func GetFederationHostForURI(ctx context.Context, actorURI string) (*forgefed.Fe
if err != nil {
return nil, err
}
- federationHost, err := forgefed.FindFederationHostByFqdn(ctx, rawActorID.Host)
+ federationHost, err := forgefed.FindFederationHostByFqdnAndPort(ctx, rawActorID.Host, rawActorID.HostPort)
if err != nil {
return nil, err
}
@@ -155,18 +164,18 @@ func GetFederationHostForURI(ctx context.Context, actorURI string) (*forgefed.Fe
}
func CreateUserFromAP(ctx context.Context, personID fm.PersonID, federationHostID int64) (*user.User, *user.FederatedUser, error) {
- // ToDo: Do we get a publicKeyId from server, repo or owner or repo?
- actionsUser := user.NewActionsUser()
+ actionsUser := user.NewAPServerActor()
clientFactory, err := activitypub.GetClientFactory(ctx)
if err != nil {
return nil, nil, err
}
- client, err := clientFactory.WithKeys(ctx, actionsUser, "no idea where to get key material.")
+
+ apClient, err := clientFactory.WithKeys(ctx, actionsUser, actionsUser.APActorKeyID())
if err != nil {
return nil, nil, err
}
- body, err := client.GetBody(personID.AsURI())
+ body, err := apClient.GetBody(personID.AsURI())
if err != nil {
return nil, nil, err
}
@@ -176,26 +185,37 @@ func CreateUserFromAP(ctx context.Context, personID fm.PersonID, federationHostI
if err != nil {
return nil, nil, err
}
+
if res, err := validation.IsValid(person); !res {
return nil, nil, err
}
+
log.Info("Fetched valid person:%q", person)
localFqdn, err := url.ParseRequestURI(setting.AppURL)
if err != nil {
return nil, nil, err
}
+
email := fmt.Sprintf("f%v@%v", uuid.New().String(), localFqdn.Hostname())
loginName := personID.AsLoginName()
name := fmt.Sprintf("%v%v", person.PreferredUsername.String(), personID.HostSuffix())
fullName := person.Name.String()
+
if len(person.Name) == 0 {
fullName = name
}
+
password, err := password.Generate(32)
if err != nil {
return nil, nil, err
}
+
+ inbox, err := url.ParseRequestURI(person.Inbox.GetLink().String())
+ if err != nil {
+ return nil, nil, err
+ }
+
newUser := user.User{
LowerName: strings.ToLower(name),
Name: name,
@@ -207,18 +227,21 @@ func CreateUserFromAP(ctx context.Context, personID fm.PersonID, federationHostI
LoginName: loginName,
Type: user.UserTypeRemoteUser,
IsAdmin: false,
- NormalizedFederatedURI: personID.AsURI(),
}
+
federatedUser := user.FederatedUser{
- ExternalID: personID.ID,
- FederationHostID: federationHostID,
+ ExternalID: personID.ID,
+ FederationHostID: federationHostID,
+ InboxPath: inbox.Path,
+ NormalizedOriginalURL: personID.AsURI(),
}
+
err = user.CreateFederatedUser(ctx, &newUser, &federatedUser)
if err != nil {
return nil, nil, err
}
- log.Info("Created federatedUser:%q", federatedUser)
+ log.Info("Created federatedUser:%q", federatedUser)
return &newUser, &federatedUser, nil
}
@@ -274,7 +297,8 @@ func SendLikeActivities(ctx context.Context, doer user.User, repoID int64) error
if err != nil {
return err
}
- apclient, err := apclientFactory.WithKeys(ctx, &doer, doer.APActorID())
+
+ apclient, err := apclientFactory.WithKeys(ctx, &doer, doer.APActorKeyID())
if err != nil {
return err
}
@@ -285,7 +309,7 @@ func SendLikeActivities(ctx context.Context, doer user.User, repoID int64) error
return err
}
- _, err = apclient.Post(json, fmt.Sprintf("%v/inbox/", activity.Object))
+ _, err = apclient.Post(json, fmt.Sprintf("%s/inbox", activity.Object))
if err != nil {
log.Error("error %v while sending activity: %q", err, activity)
}
diff --git a/services/feed/action.go b/services/feed/action.go
index a2cd0551a3..7d179bd1c8 100644
--- a/services/feed/action.go
+++ b/services/feed/action.go
@@ -39,6 +39,24 @@ func NewNotifier() notify_service.Notifier {
return &actionNotifier{}
}
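+// notifyAll records a single feed action for repository watchers; forwarding it to ActivityPub followers
+// is left as the commented-out follow-up below.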
+func notifyAll(ctx context.Context, action *activities_model.Action) error {
+ _, err := activities_model.NotifyWatchers(ctx, action)
+ if err != nil {
+ return err
+ }
+	return nil
+ // return federation_service.NotifyActivityPubFollowers(ctx, out)
+}
+
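+// notifyAllActions is the batch counterpart of notifyAll, recording several feed actions at once.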
+func notifyAllActions(ctx context.Context, acts []*activities_model.Action) error {
+ _, err := activities_model.NotifyWatchersActions(ctx, acts)
+ if err != nil {
+ return err
+ }
+ return nil
+ // return federation_service.NotifyActivityPubFollowers(ctx, out)
+}
+
func (a *actionNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue, mentions []*user_model.User) {
if err := issue.LoadPoster(ctx); err != nil {
log.Error("issue.LoadPoster: %v", err)
@@ -50,7 +68,7 @@ func (a *actionNotifier) NewIssue(ctx context.Context, issue *issues_model.Issue
}
repo := issue.Repo
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: issue.Poster.ID,
ActUser: issue.Poster,
OpType: activities_model.ActionCreateIssue,
@@ -91,7 +109,7 @@ func (a *actionNotifier) IssueChangeStatus(ctx context.Context, doer *user_model
}
// Notify watchers for whatever action comes in, ignore if no action type.
- if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ if err := notifyAll(ctx, act); err != nil {
log.Error("NotifyWatchers: %v", err)
}
}
@@ -127,7 +145,7 @@ func (a *actionNotifier) CreateIssueComment(ctx context.Context, doer *user_mode
}
// Notify watchers for whatever action comes in, ignore if no action type.
- if err := activities_model.NotifyWatchers(ctx, act); err != nil {
+ if err := notifyAll(ctx, act); err != nil {
log.Error("NotifyWatchers: %v", err)
}
}
@@ -146,7 +164,7 @@ func (a *actionNotifier) NewPullRequest(ctx context.Context, pull *issues_model.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: pull.Issue.Poster.ID,
ActUser: pull.Issue.Poster,
OpType: activities_model.ActionCreatePullRequest,
@@ -160,7 +178,7 @@ func (a *actionNotifier) NewPullRequest(ctx context.Context, pull *issues_model.
}
func (a *actionNotifier) RenameRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldRepoName string) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionRenameRepo,
@@ -174,7 +192,7 @@ func (a *actionNotifier) RenameRepository(ctx context.Context, doer *user_model.
}
func (a *actionNotifier) TransferRepository(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, oldOwnerName string) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionTransferRepo,
@@ -188,7 +206,7 @@ func (a *actionNotifier) TransferRepository(ctx context.Context, doer *user_mode
}
func (a *actionNotifier) CreateRepository(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionCreateRepo,
@@ -201,7 +219,7 @@ func (a *actionNotifier) CreateRepository(ctx context.Context, doer, u *user_mod
}
func (a *actionNotifier) ForkRepository(ctx context.Context, doer *user_model.User, oldRepo, repo *repo_model.Repository) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionCreateRepo,
@@ -266,13 +284,13 @@ func (a *actionNotifier) PullRequestReview(ctx context.Context, pr *issues_model
actions = append(actions, action)
}
- if err := activities_model.NotifyWatchersActions(ctx, actions); err != nil {
+ if err := notifyAllActions(ctx, actions); err != nil {
log.Error("notify watchers '%d/%d': %v", review.Reviewer.ID, review.Issue.RepoID, err)
}
}
func (*actionNotifier) MergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionMergePullRequest,
@@ -286,7 +304,7 @@ func (*actionNotifier) MergePullRequest(ctx context.Context, doer *user_model.Us
}
func (*actionNotifier) AutoMergePullRequest(ctx context.Context, doer *user_model.User, pr *issues_model.PullRequest) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionAutoMergePullRequest,
@@ -304,7 +322,7 @@ func (*actionNotifier) NotifyPullRevieweDismiss(ctx context.Context, doer *user_
if len(review.OriginalAuthor) > 0 {
reviewerName = review.OriginalAuthor
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: activities_model.ActionPullReviewDismissed,
@@ -342,7 +360,7 @@ func (a *actionNotifier) PushCommits(ctx context.Context, pusher *user_model.Use
opType = activities_model.ActionDeleteBranch
}
- if err = activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err = notifyAll(ctx, &activities_model.Action{
ActUserID: pusher.ID,
ActUser: pusher,
OpType: opType,
@@ -362,7 +380,7 @@ func (a *actionNotifier) CreateRef(ctx context.Context, doer *user_model.User, r
// has sent same action in `PushCommits`, so skip it.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: opType,
@@ -381,7 +399,7 @@ func (a *actionNotifier) DeleteRef(ctx context.Context, doer *user_model.User, r
// has sent same action in `PushCommits`, so skip it.
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: doer.ID,
ActUser: doer,
OpType: opType,
@@ -405,7 +423,7 @@ func (a *actionNotifier) SyncPushCommits(ctx context.Context, pusher *user_model
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncPush,
@@ -420,7 +438,7 @@ func (a *actionNotifier) SyncPushCommits(ctx context.Context, pusher *user_model
}
func (a *actionNotifier) SyncCreateRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName, refID string) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncCreate,
@@ -434,7 +452,7 @@ func (a *actionNotifier) SyncCreateRef(ctx context.Context, doer *user_model.Use
}
func (a *actionNotifier) SyncDeleteRef(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, refFullName git.RefName) {
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: repo.OwnerID,
ActUser: repo.MustOwner(ctx),
OpType: activities_model.ActionMirrorSyncDelete,
@@ -452,7 +470,7 @@ func (a *actionNotifier) NewRelease(ctx context.Context, rel *repo_model.Release
log.Error("LoadAttributes: %v", err)
return
}
- if err := activities_model.NotifyWatchers(ctx, &activities_model.Action{
+ if err := notifyAll(ctx, &activities_model.Action{
ActUserID: rel.PublisherID,
ActUser: rel.Publisher,
OpType: activities_model.ActionPublishRelease,
diff --git a/services/feed/action_test.go b/services/feed/action_test.go
index b0bbcdc3b6..93ca543a1a 100644
--- a/services/feed/action_test.go
+++ b/services/feed/action_test.go
@@ -1,4 +1,5 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package feed
@@ -102,7 +103,7 @@ func TestSyncPushCommits(t *testing.T) {
NewNotifier().SyncPushCommits(db.DefaultContext, user, repo, &repository.PushUpdateOptions{RefFullName: git.RefNameFromBranch("master")}, pushCommits())
newNotification := unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ActUserID: user.ID, RefName: "refs/heads/master"}, unittest.Cond("id > ?", maxID))
- assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"27566bd","Message":"good signed commit (with not yet validated email)","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"5099b81","Message":"good signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
+ assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"27566bd","Message":"good signed commit (with not yet validated email)","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"5099b81","Message":"good signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
})
t.Run("Only one commit", func(t *testing.T) {
@@ -112,7 +113,7 @@ func TestSyncPushCommits(t *testing.T) {
NewNotifier().SyncPushCommits(db.DefaultContext, user, repo, &repository.PushUpdateOptions{RefFullName: git.RefNameFromBranch("main")}, pushCommits())
newNotification := unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ActUserID: user.ID, RefName: "refs/heads/main"}, unittest.Cond("id > ?", maxID))
- assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
+ assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
})
}
@@ -129,7 +130,7 @@ func TestPushCommits(t *testing.T) {
NewNotifier().PushCommits(db.DefaultContext, user, repo, &repository.PushUpdateOptions{RefFullName: git.RefNameFromBranch("master")}, pushCommits())
newNotification := unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ActUserID: user.ID, RefName: "refs/heads/master"}, unittest.Cond("id > ?", maxID))
- assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"27566bd","Message":"good signed commit (with not yet validated email)","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"5099b81","Message":"good signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
+ assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"27566bd","Message":"good signed commit (with not yet validated email)","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},{"Sha1":"5099b81","Message":"good signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
})
t.Run("Only one commit", func(t *testing.T) {
@@ -139,6 +140,6 @@ func TestPushCommits(t *testing.T) {
NewNotifier().PushCommits(db.DefaultContext, user, repo, &repository.PushUpdateOptions{RefFullName: git.RefNameFromBranch("main")}, pushCommits())
newNotification := unittest.AssertExistsAndLoadBean(t, &activities_model.Action{ActUserID: user.ID, RefName: "refs/heads/main"}, unittest.Cond("id > ?", maxID))
- assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
+ assert.JSONEq(t, `{"Commits":[{"Sha1":"69554a6","Message":"not signed commit","AuthorEmail":"user2@example.com","AuthorName":"User2","CommitterEmail":"user2@example.com","CommitterName":"User2","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"}],"HeadCommit":{"Sha1":"69554a6","Message":"","AuthorEmail":"","AuthorName":"","CommitterEmail":"","CommitterName":"","Signature":null,"Verification":null,"Timestamp":"0001-01-01T00:00:00Z"},"CompareURL":"","Len":0}`, newNotification.Content)
})
}
diff --git a/services/forms/repo_form.go b/services/forms/repo_form.go
index c39c6a7b36..bb81e939b0 100644
--- a/services/forms/repo_form.go
+++ b/services/forms/repo_form.go
@@ -277,6 +277,9 @@ type WebhookCoreForm struct {
Wiki bool
Repository bool
Package bool
+ ActionFailure bool
+ ActionRecover bool
+ ActionSuccess bool
Active bool
BranchFilter string `binding:"GlobPattern"`
AuthorizationHeader string
@@ -725,8 +728,8 @@ func (f *DeleteRepoFileForm) Validate(req *http.Request, errs binding.Errors) bi
// AddTimeManuallyForm form that adds spent time manually.
type AddTimeManuallyForm struct {
- Hours int `binding:"Range(0,1000)"`
- Minutes int `binding:"Range(0,1000)"`
+ Hours int `binding:"Range(0,1000)" locale:"repo.issues.add_time_hours"`
+ Minutes int `binding:"Range(0,1000)" locale:"repo.issues.add_time_minutes"`
}
// Validate validates the fields
diff --git a/services/forms/report_abuse.go b/services/forms/report_abuse.go
new file mode 100644
index 0000000000..5e9d7dc45f
--- /dev/null
+++ b/services/forms/report_abuse.go
@@ -0,0 +1,28 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package forms
+
+import (
+ "net/http"
+
+ "forgejo.org/models/moderation"
+ "forgejo.org/modules/web/middleware"
+ "forgejo.org/services/context"
+
+ "code.forgejo.org/go-chi/binding"
+)
+
+// ReportAbuseForm is used to interact with the UI of the form that submits new abuse reports.
+type ReportAbuseForm struct {
+ ContentID int64
+ ContentType moderation.ReportedContentType
+ AbuseCategory moderation.AbuseCategoryType `binding:"Required" locale:"moderation.abuse_category"`
+ Remarks string `binding:"Required;MinSize(20);MaxSize(500)" preprocess:"TrimSpace" locale:"moderation.report_remarks"`
+}
+
+// Validate validates the fields of ReportAbuseForm.
+func (f *ReportAbuseForm) Validate(req *http.Request, errs binding.Errors) binding.Errors {
+ ctx := context.GetValidateContext(req)
+ return middleware.Validate(errs, ctx.Data, f, ctx.Locale)
+}
diff --git a/services/forms/user_form_test.go b/services/forms/user_form_test.go
index 67fb64cabf..ae08f65f23 100644
--- a/services/forms/user_form_test.go
+++ b/services/forms/user_form_test.go
@@ -9,18 +9,14 @@ import (
auth_model "forgejo.org/models/auth"
"forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
"github.com/gobwas/glob"
"github.com/stretchr/testify/assert"
)
func TestRegisterForm_IsDomainAllowed_Empty(t *testing.T) {
- oldService := setting.Service
- defer func() {
- setting.Service = oldService
- }()
-
- setting.Service.EmailDomainAllowList = nil
+ defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, nil)()
form := RegisterForm{}
@@ -28,12 +24,7 @@ func TestRegisterForm_IsDomainAllowed_Empty(t *testing.T) {
}
func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) {
- oldService := setting.Service
- defer func() {
- setting.Service = oldService
- }()
-
- setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io")}
+ defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io")})()
tt := []struct {
email string
@@ -50,12 +41,7 @@ func TestRegisterForm_IsDomainAllowed_InvalidEmail(t *testing.T) {
}
func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) {
- oldService := setting.Service
- defer func() {
- setting.Service = oldService
- }()
-
- setting.Service.EmailDomainAllowList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")}
+ defer test.MockVariableValue(&setting.Service.EmailDomainAllowList, []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.allow")})()
tt := []struct {
email string
@@ -78,13 +64,7 @@ func TestRegisterForm_IsDomainAllowed_AllowedEmail(t *testing.T) {
}
func TestRegisterForm_IsDomainAllowed_BlockedEmail(t *testing.T) {
- oldService := setting.Service
- defer func() {
- setting.Service = oldService
- }()
-
- setting.Service.EmailDomainAllowList = nil
- setting.Service.EmailDomainBlockList = []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")}
+ defer test.MockVariableValue(&setting.Service.EmailDomainBlockList, []glob.Glob{glob.MustCompile("gitea.io"), glob.MustCompile("*.block")})()
tt := []struct {
email string
diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go
index 2e1fecda2a..7033264f18 100644
--- a/services/gitdiff/gitdiff.go
+++ b/services/gitdiff/gitdiff.go
@@ -440,11 +440,29 @@ func getCommitFileLineCount(commit *git.Commit, filePath string) int {
if err != nil {
return 0
}
- lineCount, err := blob.GetBlobLineCount()
+ reader, err := blob.DataAsync()
if err != nil {
return 0
}
- return lineCount
+ defer reader.Close()
+ buf := make([]byte, 32*1024)
+ count := 1
+ lineSep := []byte{'\n'}
+
+ c, err := reader.Read(buf)
+ if c == 0 && err == io.EOF {
+ return 0
+ }
+ for {
+ count += bytes.Count(buf[:c], lineSep)
+ switch {
+ case err == io.EOF:
+ return count
+ case err != nil:
+ return count
+ }
+ c, err = reader.Read(buf)
+ }
}
// Diff represents a difference between two git trees.
@@ -1060,7 +1078,7 @@ func readFileName(rd *strings.Reader) (string, bool) {
_, _ = fmt.Fscanf(rd, "%s ", &name)
char, _ := rd.ReadByte()
_ = rd.UnreadByte()
- for !(char == 0 || char == '"' || char == 'b') {
+ for char != 0 && char != '"' && char != 'b' {
var suffix string
_, _ = fmt.Fscanf(rd, "%s ", &suffix)
name += " " + suffix
@@ -1088,62 +1106,63 @@ type DiffOptions struct {
FileOnly bool
}
-// GetDiff builds a Diff between two commits of a repository.
+// GetDiffSimple builds a Diff between two commits of a repository.
// Passing the empty string as beforeCommitID returns a diff from the parent commit.
-// The whitespaceBehavior is either an empty string or a git flag
-func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
- repoPath := gitRepo.Path
-
- var beforeCommit *git.Commit
- commit, err := gitRepo.GetCommit(opts.AfterCommitID)
+// The whitespaceBehavior is either an empty string or a git flag.
+func GetDiffSimple(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (diff *Diff, afterCommit *git.Commit, err error) {
+ afterCommit, err = gitRepo.GetCommit(opts.AfterCommitID)
if err != nil {
- return nil, err
+ return nil, nil, fmt.Errorf("unable to get the after commit %q: %w", opts.AfterCommitID, err)
}
cmdCtx, cmdCancel := context.WithCancel(ctx)
defer cmdCancel()
- cmdDiff := git.NewCommand(cmdCtx)
+ cmdDiff := git.NewCommand(cmdCtx).
+ AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
+ AddArguments(opts.WhitespaceBehavior...)
+
objectFormat, err := gitRepo.GetObjectFormat()
if err != nil {
- return nil, err
+ return nil, nil, fmt.Errorf("not able to determine the object format: %w", err)
}
- if (len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String()) && commit.ParentCount() == 0 {
- cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
- AddArguments(opts.WhitespaceBehavior...).
- AddDynamicArguments(objectFormat.EmptyTree().String()).
- AddDynamicArguments(opts.AfterCommitID)
+ // If the before commit is empty or the empty object ID, and the after commit
+ // has no parents, then use the empty tree as the before commit.
+ //
+ // This is the case for an 'initial commit' of a Git tree, which obviously has
+ // no parents.
+ if (len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == objectFormat.EmptyObjectID().String()) && afterCommit.ParentCount() == 0 {
+ // Reset before commit ID to indicate empty tree was used.
+ opts.BeforeCommitID = ""
+ // Add the empty tree as the before commit.
+ cmdDiff.AddDynamicArguments(objectFormat.EmptyTree().String())
} else {
- actualBeforeCommitID := opts.BeforeCommitID
- if len(actualBeforeCommitID) == 0 {
- parentCommit, err := commit.Parent(0)
+ // If before commit ID is empty, use the first parent of the after commit.
+ if len(opts.BeforeCommitID) == 0 {
+ parentCommit, err := afterCommit.Parent(0)
if err != nil {
- return nil, err
+ return nil, nil, fmt.Errorf("not able to get first parent of %q: %w", afterCommit.ID.String(), err)
}
- actualBeforeCommitID = parentCommit.ID.String()
+ opts.BeforeCommitID = parentCommit.ID.String()
}
- cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M").
- AddArguments(opts.WhitespaceBehavior...).
- AddDynamicArguments(actualBeforeCommitID, opts.AfterCommitID)
- opts.BeforeCommitID = actualBeforeCommitID
-
- beforeCommit, err = gitRepo.GetCommit(opts.BeforeCommitID)
- if err != nil {
- return nil, err
- }
+ cmdDiff.AddDynamicArguments(opts.BeforeCommitID)
}
+ // Add the after commit to the diff command.
+ cmdDiff.AddDynamicArguments(opts.AfterCommitID)
+
// In git 2.31, git diff learned --skip-to which we can use to shortcut skip to file
// so if we are using at least this version of git we don't have to tell ParsePatch to do
// the skipping for us
parsePatchSkipToFile := opts.SkipTo
- if opts.SkipTo != "" && git.CheckGitVersionAtLeast("2.31") == nil {
+ if opts.SkipTo != "" {
cmdDiff.AddOptionFormat("--skip-to=%s", opts.SkipTo)
parsePatchSkipToFile = ""
}
+ // If we only want to diff for some files, add that as well.
cmdDiff.AddDashesAndList(files...)
reader, writer := io.Pipe()
@@ -1153,6 +1172,7 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
}()
go func() {
+ repoPath := gitRepo.Path
stderr := &bytes.Buffer{}
cmdDiff.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath))
if err := cmdDiff.Run(&git.RunOpts{
@@ -1167,14 +1187,33 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
_ = writer.Close()
}()
- diff, err := ParsePatch(cmdCtx, opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile)
+ diff, err = ParsePatch(cmdCtx, opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile)
// Ensure the git process is killed if it didn't exit already
cmdCancel()
if err != nil {
- return nil, fmt.Errorf("unable to ParsePatch: %w", err)
+ return nil, nil, fmt.Errorf("unable to parse a git diff: %w", err)
}
diff.Start = opts.SkipTo
+ return diff, afterCommit, nil
+}
+
+func GetDiffFull(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
+ diff, afterCommit, err := GetDiffSimple(ctx, gitRepo, opts, files...)
+ if err != nil {
+ return nil, err
+ }
+
+ // If there's a before commit, then GetDiffSimple will set it, otherwise it
+ // is empty.
+ var beforeCommit *git.Commit
+ if len(opts.BeforeCommitID) != 0 {
+ beforeCommit, err = gitRepo.GetCommit(opts.BeforeCommitID)
+ if err != nil {
+ return nil, fmt.Errorf("unable to get before commit %q: %w", opts.BeforeCommitID, err)
+ }
+ }
+
checker, err := gitRepo.GitAttributeChecker(opts.AfterCommitID, git.LinguistAttributes...)
if err != nil {
return nil, fmt.Errorf("unable to GitAttributeChecker: %w", err)
@@ -1210,7 +1249,7 @@ func GetDiff(ctx context.Context, gitRepo *git.Repository, opts *DiffOptions, fi
diffFile.IsGenerated = analyze.IsGenerated(diffFile.Name)
}
- tailSection := diffFile.GetTailSection(gitRepo, beforeCommit, commit)
+ tailSection := diffFile.GetTailSection(gitRepo, beforeCommit, afterCommit)
if tailSection != nil {
diffFile.Sections = append(diffFile.Sections, tailSection)
}
@@ -1272,7 +1311,7 @@ func GetPullDiffStats(gitRepo *git.Repository, opts *DiffOptions) (*PullDiffStat
// SyncAndGetUserSpecificDiff is like GetDiff, except that user specific data such as which files the given user has already viewed on the given PR will also be set
// Additionally, the database asynchronously is updated if files have changed since the last review
func SyncAndGetUserSpecificDiff(ctx context.Context, userID int64, pull *issues_model.PullRequest, gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff, error) {
- diff, err := GetDiff(ctx, gitRepo, opts, files...)
+ diff, err := GetDiffFull(ctx, gitRepo, opts, files...)
if err != nil {
return nil, err
}
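
The GetDiff entry point is split above into GetDiffSimple (run and parse git diff, also returning the resolved after commit) and GetDiffFull (the same plus before-commit lookup, linguist attributes and tail sections). A rough caller sketch, assuming a package outside services/gitdiff and the import paths used elsewhere in this patch:

package diffexample

import (
	"context"

	"forgejo.org/modules/git"
	"forgejo.org/modules/setting"
	"forgejo.org/services/gitdiff"
)

func diffSketch(ctx context.Context, gitRepo *git.Repository, afterSHA string) error {
	opts := &gitdiff.DiffOptions{
		AfterCommitID:     afterSHA,
		MaxLines:          setting.Git.MaxGitDiffLines,
		MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
		MaxFiles:          setting.Git.MaxGitDiffFiles,
	}

	// Cheap path: only run and parse `git diff`; the resolved after commit is
	// returned so the caller does not need to look it up a second time.
	diff, afterCommit, err := gitdiff.GetDiffSimple(ctx, gitRepo, opts)
	if err != nil {
		return err
	}
	_, _ = diff, afterCommit

	// Full path (used by SyncAndGetUserSpecificDiff above): GetDiffSimple plus
	// loading the before commit (opts.BeforeCommitID is filled in by
	// GetDiffSimple), linguist attributes and tail sections.
	_, err = gitdiff.GetDiffFull(ctx, gitRepo, opts)
	return err
}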
diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go
index 532255fe84..695b177b8b 100644
--- a/services/gitdiff/gitdiff_test.go
+++ b/services/gitdiff/gitdiff_test.go
@@ -635,7 +635,7 @@ func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) {
defer gitRepo.Close()
for _, behavior := range []git.TrustedCmdArgs{{"-w"}, {"--ignore-space-at-eol"}, {"-b"}, nil} {
- diffs, err := GetDiff(db.DefaultContext, gitRepo,
+ diffs, _, err := GetDiffSimple(db.DefaultContext, gitRepo,
&DiffOptions{
AfterCommitID: "bd7063cc7c04689c4d082183d32a604ed27a24f9",
BeforeCommitID: "559c156f8e0178b71cb44355428f24001b08fc68",
@@ -651,6 +651,72 @@ func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) {
}
}
+func TestGetDiffFull(t *testing.T) {
+ gitRepo, err := git.OpenRepository(git.DefaultContext, "./../../modules/git/tests/repos/language_stats_repo")
+ require.NoError(t, err)
+
+ defer gitRepo.Close()
+
+ t.Run("Initial commit", func(t *testing.T) {
+ diff, err := GetDiffFull(db.DefaultContext, gitRepo,
+ &DiffOptions{
+ AfterCommitID: "8fee858da5796dfb37704761701bb8e800ad9ef3",
+ MaxLines: setting.Git.MaxGitDiffLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: setting.Git.MaxGitDiffFiles,
+ })
+ require.NoError(t, err)
+
+ assert.Empty(t, diff.Start)
+ assert.Empty(t, diff.End)
+ assert.False(t, diff.IsIncomplete)
+ assert.Equal(t, 5, diff.NumFiles)
+ assert.Equal(t, 23, diff.TotalAddition)
+ assert.Len(t, diff.Files, 5)
+
+ assert.True(t, diff.Files[0].IsVendored)
+ assert.Equal(t, ".gitattributes", diff.Files[0].Name)
+ assert.Equal(t, "24139dae656713ba861751fb2c2ac38839349a7a", diff.Files[0].NameHash)
+
+ assert.Equal(t, "Python", diff.Files[1].Language)
+ assert.Equal(t, "i-am-a-python.p", diff.Files[1].Name)
+ assert.Equal(t, "32154957b043de62cbcdbe254a53ec4c3e00c5a0", diff.Files[1].NameHash)
+
+ assert.Equal(t, "java-hello/main.java", diff.Files[2].Name)
+ assert.Equal(t, "ef9f6a406a4cde7bb5480ba7b027bdc8cd6fa11d", diff.Files[2].NameHash)
+
+ assert.Equal(t, "main.vendor.java", diff.Files[3].Name)
+ assert.Equal(t, "c94fd7272f109d4d21d6df2b637c864a5ab63f46", diff.Files[3].NameHash)
+
+ assert.Equal(t, "python-hello/hello.py", diff.Files[4].Name)
+ assert.Equal(t, "021705ba8b98778dc4e277d3a6ea1b8c6122a7f9", diff.Files[4].NameHash)
+ })
+
+ t.Run("Normal diff", func(t *testing.T) {
+ diff, err := GetDiffFull(db.DefaultContext, gitRepo,
+ &DiffOptions{
+ AfterCommitID: "341fca5b5ea3de596dc483e54c2db28633cd2f97",
+ BeforeCommitID: "8fee858da5796dfb37704761701bb8e800ad9ef3",
+ MaxLines: setting.Git.MaxGitDiffLines,
+ MaxLineCharacters: setting.Git.MaxGitDiffLineCharacters,
+ MaxFiles: setting.Git.MaxGitDiffFiles,
+ })
+ require.NoError(t, err)
+
+ assert.Empty(t, diff.Start)
+ assert.Empty(t, diff.End)
+ assert.False(t, diff.IsIncomplete)
+ assert.Equal(t, 1, diff.NumFiles)
+ assert.Equal(t, 1, diff.TotalAddition)
+ assert.Len(t, diff.Files, 1)
+
+ assert.Equal(t, ".gitattributes", diff.Files[0].Name)
+ assert.Equal(t, "24139dae656713ba861751fb2c2ac38839349a7a", diff.Files[0].NameHash)
+ assert.Len(t, diff.Files[0].Sections, 2)
+ assert.Equal(t, 4, diff.Files[0].Sections[1].Lines[0].SectionInfo.LeftIdx)
+ })
+}
+
func TestNoCrashes(t *testing.T) {
type testcase struct {
gitdiff string
diff --git a/services/gitdiff/highlightdiff.go b/services/gitdiff/highlightdiff.go
index 08681b8617..61d52d91e6 100644
--- a/services/gitdiff/highlightdiff.go
+++ b/services/gitdiff/highlightdiff.go
@@ -14,13 +14,14 @@ import (
// token is a html tag or entity, eg: "<span ...>", "</span>", "&lt;"
func extractHTMLToken(s string) (before, token, after string, valid bool) {
for pos1 := 0; pos1 < len(s); pos1++ {
- if s[pos1] == '<' {
+ switch s[pos1] {
+ case '<':
pos2 := strings.IndexByte(s[pos1:], '>')
if pos2 == -1 {
return "", "", s, false
}
return s[:pos1], s[pos1 : pos1+pos2+1], s[pos1+pos2+1:], true
- } else if s[pos1] == '&' {
+ case '&':
pos2 := strings.IndexByte(s[pos1:], ';')
if pos2 == -1 {
return "", "", s, false
diff --git a/services/gitdiff/highlightdiff_test.go b/services/gitdiff/highlightdiff_test.go
index 2ff4472bcc..0070173b9f 100644
--- a/services/gitdiff/highlightdiff_test.go
+++ b/services/gitdiff/highlightdiff_test.go
@@ -43,7 +43,7 @@ func TestDiffWithHighlight(t *testing.T) {
diff.Text = "C"
hcd.recoverOneDiff(&diff)
- assert.Equal(t, "", diff.Text)
+ assert.Empty(t, diff.Text)
}
func TestDiffWithHighlightPlaceholder(t *testing.T) {
@@ -53,8 +53,8 @@ func TestDiffWithHighlightPlaceholder(t *testing.T) {
"a='\U00100000'",
"a='\U0010FFFD''",
)
- assert.Equal(t, "", hcd.PlaceholderTokenMap[0x00100000])
- assert.Equal(t, "", hcd.PlaceholderTokenMap[0x0010FFFD])
+ assert.Empty(t, hcd.PlaceholderTokenMap[0x00100000])
+ assert.Empty(t, hcd.PlaceholderTokenMap[0x0010FFFD])
expected := fmt.Sprintf(`a='%s'`, "\U00100000")
output := diffToHTML(hcd.lineWrapperTags, diffs, DiffLineDel)
diff --git a/services/issue/comments.go b/services/issue/comments.go
index dedef6cc87..2cac900d41 100644
--- a/services/issue/comments.go
+++ b/services/issue/comments.go
@@ -5,6 +5,7 @@ package issue
import (
"context"
+ "errors"
"fmt"
"forgejo.org/models/db"
@@ -18,7 +19,7 @@ import (
// CreateRefComment creates a commit reference comment to issue.
func CreateRefComment(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, issue *issues_model.Issue, content, commitSHA string) error {
if len(commitSHA) == 0 {
- return fmt.Errorf("cannot create reference with empty commit SHA")
+ return errors.New("cannot create reference with empty commit SHA")
}
// Check if same reference from same commit has already existed.
@@ -119,7 +120,28 @@ func UpdateComment(ctx context.Context, c *issues_model.Comment, contentVersion
// DeleteComment deletes the comment
func DeleteComment(ctx context.Context, doer *user_model.User, comment *issues_model.Comment) error {
err := db.WithTx(ctx, func(ctx context.Context) error {
- return issues_model.DeleteComment(ctx, comment)
+ reviewID := comment.ReviewID
+
+ err := issues_model.DeleteComment(ctx, comment)
+ if err != nil {
+ return err
+ }
+
+ if comment.Review != nil {
+ reviewType := comment.Review.Type
+ if reviewType == issues_model.ReviewTypePending {
+ found, err := db.GetEngine(ctx).Table("comment").Where("review_id = ?", reviewID).Exist()
+ if err != nil {
+ return err
+ } else if !found {
+ _, err := db.GetEngine(ctx).Table("review").Where("id = ?", reviewID).Delete()
+ if err != nil {
+ return err
+ }
+ }
+ }
+ }
+ return nil
})
if err != nil {
return err
diff --git a/services/issue/comments_test.go b/services/issue/comments_test.go
index 728af15529..8fa410c0f0 100644
--- a/services/issue/comments_test.go
+++ b/services/issue/comments_test.go
@@ -48,9 +48,9 @@ func TestDeleteComment(t *testing.T) {
// Reactions don't exist anymore for this comment.
unittest.AssertNotExistsBean(t, &issues_model.Reaction{CommentID: comment.ID})
// Number of comments was decreased.
- assert.EqualValues(t, issue.NumComments-1, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
+ assert.Equal(t, issue.NumComments-1, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
// A notification was fired for the deletion of this comment.
- assert.EqualValues(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
+ assert.Equal(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
})
t.Run("Comment of pending review", func(t *testing.T) {
@@ -59,7 +59,7 @@ func TestDeleteComment(t *testing.T) {
// We have to ensure that this comment's linked review is pending.
comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4}, "review_id != 0")
review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: comment.ReviewID})
- assert.EqualValues(t, issues_model.ReviewTypePending, review.Type)
+ assert.Equal(t, issues_model.ReviewTypePending, review.Type)
issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
@@ -69,14 +69,17 @@ func TestDeleteComment(t *testing.T) {
}))
hookTaskCount := unittest.GetCount(t, &webhook_model.HookTask{})
+ require.NoError(t, comment.LoadReview(t.Context()))
require.NoError(t, issue_service.DeleteComment(db.DefaultContext, nil, comment))
// The comment doesn't exist anymore.
unittest.AssertNotExistsBean(t, &issues_model.Comment{ID: comment.ID})
// Ensure that the number of comments wasn't decreased.
- assert.EqualValues(t, issue.NumComments, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
+ assert.Equal(t, issue.NumComments, unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID}).NumComments)
// No notification was fired for the deletion of this comment.
- assert.EqualValues(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
+ assert.Equal(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
+ // The review doesn't exist anymore.
+ unittest.AssertNotExistsBean(t, &issues_model.Review{ID: comment.ReviewID})
})
}
@@ -105,11 +108,11 @@ func TestUpdateComment(t *testing.T) {
newComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2})
// Content was updated.
- assert.EqualValues(t, comment.Content, newComment.Content)
+ assert.Equal(t, comment.Content, newComment.Content)
// Content version was updated.
- assert.EqualValues(t, 2, newComment.ContentVersion)
+ assert.Equal(t, 2, newComment.ContentVersion)
// A notification was fired for the update of this comment.
- assert.EqualValues(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
+ assert.Equal(t, hookTaskCount+1, unittest.GetCount(t, &webhook_model.HookTask{}))
// Issue history was saved for this comment.
unittest.AssertExistsAndLoadBean(t, &issues_model.ContentHistory{CommentID: comment.ID, IsFirstCreated: true, ContentText: oldContent})
unittest.AssertExistsAndLoadBean(t, &issues_model.ContentHistory{CommentID: comment.ID, ContentText: comment.Content}, "is_first_created = false")
@@ -120,7 +123,7 @@ func TestUpdateComment(t *testing.T) {
comment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 4}, "review_id != 0")
review := unittest.AssertExistsAndLoadBean(t, &issues_model.Review{ID: comment.ReviewID})
- assert.EqualValues(t, issues_model.ReviewTypePending, review.Type)
+ assert.Equal(t, issues_model.ReviewTypePending, review.Type)
issue := unittest.AssertExistsAndLoadBean(t, &issues_model.Issue{ID: comment.IssueID})
unittest.AssertNotExistsBean(t, &issues_model.ContentHistory{CommentID: comment.ID})
require.NoError(t, webhook_model.CreateWebhook(db.DefaultContext, &webhook_model.Webhook{
@@ -136,11 +139,11 @@ func TestUpdateComment(t *testing.T) {
newComment := unittest.AssertExistsAndLoadBean(t, &issues_model.Comment{ID: 2})
// Content was updated.
- assert.EqualValues(t, comment.Content, newComment.Content)
+ assert.Equal(t, comment.Content, newComment.Content)
// Content version was updated.
- assert.EqualValues(t, 2, newComment.ContentVersion)
+ assert.Equal(t, 2, newComment.ContentVersion)
// No notification was fired for the update of this comment.
- assert.EqualValues(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
+ assert.Equal(t, hookTaskCount, unittest.GetCount(t, &webhook_model.HookTask{}))
// Issue history was not saved for this comment.
unittest.AssertNotExistsBean(t, &issues_model.ContentHistory{CommentID: comment.ID})
})
diff --git a/services/issue/issue.go b/services/issue/issue.go
index f6a3e90b10..7071a912b0 100644
--- a/services/issue/issue.go
+++ b/services/issue/issue.go
@@ -1,10 +1,12 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package issue
import (
"context"
+ "errors"
"fmt"
"time"
@@ -59,7 +61,6 @@ func NewIssue(ctx context.Context, repo *repo_model.Repository, issue *issues_mo
// ChangeTitle changes the title of this issue, as the given user.
func ChangeTitle(ctx context.Context, issue *issues_model.Issue, doer *user_model.User, title string) error {
oldTitle := issue.Title
- issue.Title = title
if oldTitle == title {
return nil
@@ -73,6 +74,12 @@ func ChangeTitle(ctx context.Context, issue *issues_model.Issue, doer *user_mode
return user_model.ErrBlockedByUser
}
+ // If the issue was reported as abusive, a shadow copy should be created before first update.
+ if err := issues_model.IfNeededCreateShadowCopyForIssue(ctx, issue); err != nil {
+ return err
+ }
+
+ issue.Title = title
if err := issues_model.ChangeIssueTitle(ctx, issue, doer, oldTitle); err != nil {
return err
}
@@ -252,6 +259,12 @@ func deleteIssue(ctx context.Context, issue *issues_model.Issue) error {
defer committer.Close()
e := db.GetEngine(ctx)
+
+ // If the issue was reported as abusive, a shadow copy should be created before deletion.
+ if err := issues_model.IfNeededCreateShadowCopyForIssue(ctx, issue); err != nil {
+ return err
+ }
+
if _, err := e.ID(issue.ID).NoAutoCondition().Delete(issue); err != nil {
return err
}
@@ -333,13 +346,13 @@ func SetIssueUpdateDate(ctx context.Context, issue *issues_model.Issue, updated
return err
}
if !perm.IsAdmin() && !perm.IsOwner() {
- return fmt.Errorf("user needs to have admin or owner right")
+ return errors.New("user needs to have admin or owner right")
}
// A simple guard against potential inconsistent calls
updatedUnix := timeutil.TimeStamp(updated.Unix())
if updatedUnix < issue.CreatedUnix || updatedUnix > timeutil.TimeStampNow() {
- return fmt.Errorf("unallowed update date")
+ return errors.New("unallowed update date")
}
issue.UpdatedUnix = updatedUnix
diff --git a/services/issue/issue_test.go b/services/issue/issue_test.go
index e15a0118ad..fb2b2870bd 100644
--- a/services/issue/issue_test.go
+++ b/services/issue/issue_test.go
@@ -25,8 +25,8 @@ func TestGetRefEndNamesAndURLs(t *testing.T) {
repoLink := "/foo/bar"
endNames, urls := GetRefEndNamesAndURLs(issues, repoLink)
- assert.EqualValues(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames)
- assert.EqualValues(t, map[int64]string{
+ assert.Equal(t, map[int64]string{1: "branch1", 2: "tag1", 3: "c0ffee"}, endNames)
+ assert.Equal(t, map[int64]string{
1: repoLink + "/src/branch/branch1",
2: repoLink + "/src/tag/tag1",
3: repoLink + "/src/commit/c0ffee",
diff --git a/services/issue/label.go b/services/issue/label.go
index bcac54272a..f18dd67a19 100644
--- a/services/issue/label.go
+++ b/services/issue/label.go
@@ -8,7 +8,6 @@ import (
"forgejo.org/models/db"
issues_model "forgejo.org/models/issues"
- access_model "forgejo.org/models/perm/access"
user_model "forgejo.org/models/user"
notify_service "forgejo.org/services/notify"
)
@@ -56,17 +55,6 @@ func RemoveLabel(ctx context.Context, issue *issues_model.Issue, doer *user_mode
return err
}
- perm, err := access_model.GetUserRepoPermission(dbCtx, issue.Repo, doer)
- if err != nil {
- return err
- }
- if !perm.CanWriteIssuesOrPulls(issue.IsPull) {
- if label.OrgID > 0 {
- return issues_model.ErrOrgLabelNotExist{}
- }
- return issues_model.ErrRepoLabelNotExist{}
- }
-
if err := issues_model.DeleteIssueLabel(dbCtx, issue, label, doer); err != nil {
return err
}
diff --git a/services/issue/milestone.go b/services/issue/milestone.go
index 3fa7083812..a561bf8eee 100644
--- a/services/issue/milestone.go
+++ b/services/issue/milestone.go
@@ -5,6 +5,7 @@ package issue
import (
"context"
+ "errors"
"fmt"
"forgejo.org/models/db"
@@ -47,7 +48,7 @@ func changeMilestoneAssign(ctx context.Context, doer *user_model.User, issue *is
return fmt.Errorf("HasMilestoneByRepoID: %w", err)
}
if !has {
- return fmt.Errorf("HasMilestoneByRepoID: issue doesn't exist")
+ return errors.New("HasMilestoneByRepoID: issue doesn't exist")
}
}
diff --git a/services/issue/pull.go b/services/issue/pull.go
index b0a0c47d88..2eef1fbfa8 100644
--- a/services/issue/pull.go
+++ b/services/issue/pull.go
@@ -43,8 +43,6 @@ type ReviewRequestNotifier struct {
}
func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue, pr *issues_model.PullRequest) ([]*ReviewRequestNotifier, error) {
- files := []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"}
-
if pr.IsWorkInProgress(ctx) {
return nil, nil
}
@@ -72,18 +70,17 @@ func PullRequestCodeOwnersReview(ctx context.Context, issue *issues_model.Issue,
return nil, err
}
- var data string
- for _, file := range files {
+ var rules []*issues_model.CodeOwnerRule
+ for _, file := range []string{"CODEOWNERS", "docs/CODEOWNERS", ".gitea/CODEOWNERS"} {
if blob, err := commit.GetBlobByPath(file); err == nil {
- data, err = blob.GetBlobContent(setting.UI.MaxDisplayFileSize)
+ rc, size, err := blob.NewTruncatedReader(setting.UI.MaxDisplayFileSize)
if err == nil {
+ rules, _ = issues_model.GetCodeOwnersFromReader(ctx, rc, size > setting.UI.MaxDisplayFileSize)
break
}
}
}
- rules, _ := issues_model.GetCodeOwnersFromContent(ctx, data)
-
// get the mergebase
mergeBase, err := getMergeBase(repo, pr, git.BranchPrefix+pr.BaseBranch, pr.GetGitRefName())
if err != nil {
diff --git a/services/lfs/server.go b/services/lfs/server.go
index 8eef62eabe..17e6d0eec7 100644
--- a/services/lfs/server.go
+++ b/services/lfs/server.go
@@ -163,11 +163,12 @@ func BatchHandler(ctx *context.Context) {
}
var isUpload bool
- if br.Operation == "upload" {
+ switch br.Operation {
+ case "upload":
isUpload = true
- } else if br.Operation == "download" {
+ case "download":
isUpload = false
- } else {
+ default:
log.Trace("Attempt to BATCH with invalid operation: %s", br.Operation)
writeStatus(ctx, http.StatusBadRequest)
return
@@ -594,15 +595,15 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo
claims, claimsOk := token.Claims.(*Claims)
if !token.Valid || !claimsOk {
- return nil, fmt.Errorf("invalid token claim")
+ return nil, errors.New("invalid token claim")
}
if claims.RepoID != target.ID {
- return nil, fmt.Errorf("invalid token claim")
+ return nil, errors.New("invalid token claim")
}
if mode == perm.AccessModeWrite && claims.Op != "upload" {
- return nil, fmt.Errorf("invalid token claim")
+ return nil, errors.New("invalid token claim")
}
u, err := user_model.GetUserByID(ctx, claims.UserID)
@@ -615,12 +616,12 @@ func handleLFSToken(ctx stdCtx.Context, tokenSHA string, target *repo_model.Repo
func parseToken(ctx stdCtx.Context, authorization string, target *repo_model.Repository, mode perm.AccessMode) (*user_model.User, error) {
if authorization == "" {
- return nil, fmt.Errorf("no token")
+ return nil, errors.New("no token")
}
parts := strings.SplitN(authorization, " ", 2)
if len(parts) != 2 {
- return nil, fmt.Errorf("no token")
+ return nil, errors.New("no token")
}
tokenSHA := parts[1]
switch strings.ToLower(parts[0]) {
@@ -629,7 +630,7 @@ func parseToken(ctx stdCtx.Context, authorization string, target *repo_model.Rep
case "token":
return handleLFSToken(ctx, tokenSHA, target, mode)
}
- return nil, fmt.Errorf("token not found")
+ return nil, errors.New("token not found")
}
func requireAuth(ctx *context.Context) {
diff --git a/services/mailer/mail.go b/services/mailer/mail.go
index 4269686f2d..410fdf6894 100644
--- a/services/mailer/mail.go
+++ b/services/mailer/mail.go
@@ -685,19 +685,14 @@ func SendRemovedSecurityKey(ctx context.Context, u *user_model.User, securityKey
}
locale := translation.NewLocale(u.Language)
- hasWebAuthn, err := auth_model.HasWebAuthnRegistrationsByUID(ctx, u.ID)
- if err != nil {
- return err
- }
- hasTOTP, err := auth_model.HasTOTPByUID(ctx, u.ID)
+ hasTwoFactor, err := auth_model.HasTwoFactorByUID(ctx, u.ID)
if err != nil {
return err
}
data := map[string]any{
"locale": locale,
- "HasWebAuthn": hasWebAuthn,
- "HasTOTP": hasTOTP,
+ "HasTwoFactor": hasTwoFactor,
"SecurityKeyName": securityKeyName,
"DisplayName": u.DisplayName(),
"Username": u.Name,
diff --git a/services/mailer/mail_actions.go b/services/mailer/mail_actions.go
new file mode 100644
index 0000000000..09763e164e
--- /dev/null
+++ b/services/mailer/mail_actions.go
@@ -0,0 +1,89 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+package mailer
+
+import (
+ "bytes"
+
+ actions_model "forgejo.org/models/actions"
+ user_model "forgejo.org/models/user"
+ "forgejo.org/modules/base"
+ "forgejo.org/modules/setting"
+ "forgejo.org/modules/translation"
+)
+
+const (
+ tplActionNowDone base.TplName = "actions/now_done"
+)
+
+// MailActionRun requires that !run.Status.IsSuccess() or !lastRun.Status.IsSuccess()
+func MailActionRun(run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) error {
+ if setting.MailService == nil {
+ // No mail service configured
+ return nil
+ }
+
+ if !run.NotifyEmail {
+ return nil
+ }
+
+ user := run.TriggerUser
+ // This happens e.g. when the run was scheduled.
+ if user.IsSystem() {
+ user = run.Repo.Owner
+ }
+ if user.IsSystem() || user.Email == "" {
+ return nil
+ }
+
+ if user.EmailNotificationsPreference == user_model.EmailNotificationsDisabled {
+ return nil
+ }
+
+ return sendMailActionRun(user, run, priorStatus, lastRun)
+}
+
+func sendMailActionRun(to *user_model.User, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) error {
+ var (
+ locale = translation.NewLocale(to.Language)
+ content bytes.Buffer
+ )
+
+ var subject string
+ if run.Status.IsSuccess() {
+ subject = locale.TrString("mail.actions.successful_run_after_failure_subject", run.Title, run.Repo.FullName())
+ } else {
+ subject = locale.TrString("mail.actions.not_successful_run", run.Title, run.Repo.FullName())
+ }
+
+ commitSHA := run.CommitSHA
+ if len(commitSHA) > 7 {
+ commitSHA = commitSHA[:7]
+ }
+ branch := run.PrettyRef()
+
+ data := map[string]any{
+ "locale": locale,
+ "Link": run.HTMLURL(),
+ "Subject": subject,
+ "Language": locale.Language(),
+ "RepoFullName": run.Repo.FullName(),
+ "Run": run,
+ "TriggerUserLink": run.TriggerUser.HTMLURL(),
+ "LastRun": lastRun,
+ "PriorStatus": priorStatus,
+ "CommitSHA": commitSHA,
+ "Branch": branch,
+ "IsSuccess": run.Status.IsSuccess(),
+ }
+
+ if err := bodyTemplates.ExecuteTemplate(&content, string(tplActionNowDone), data); err != nil {
+ return err
+ }
+
+ msg := NewMessage(to.EmailTo(), subject, content.String())
+ msg.Info = subject
+ SendAsync(msg)
+
+ return nil
+}
diff --git a/services/mailer/mail_actions_now_done_test.go b/services/mailer/mail_actions_now_done_test.go
new file mode 100644
index 0000000000..6a01ea7631
--- /dev/null
+++ b/services/mailer/mail_actions_now_done_test.go
@@ -0,0 +1,240 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package mailer
+
+import (
+ "slices"
+ "testing"
+
+ actions_model "forgejo.org/models/actions"
+ "forgejo.org/models/db"
+ organization_model "forgejo.org/models/organization"
+ repo_model "forgejo.org/models/repo"
+ user_model "forgejo.org/models/user"
+ "forgejo.org/modules/optional"
+ "forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
+ notify_service "forgejo.org/services/notify"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func getActionsNowDoneTestUser(t *testing.T, name, email, notifications string) *user_model.User {
+ t.Helper()
+ user := new(user_model.User)
+ user.Name = name
+ user.Language = "en_US"
+ user.IsAdmin = false
+ user.Email = email
+ user.LastLoginUnix = 1693648327
+ user.CreatedUnix = 1693648027
+ opts := user_model.CreateUserOverwriteOptions{
+ AllowCreateOrganization: optional.Some(true),
+ EmailNotificationsPreference: ¬ifications,
+ }
+ require.NoError(t, user_model.AdminCreateUser(db.DefaultContext, user, &opts))
+ return user
+}
+
+func getActionsNowDoneTestOrg(t *testing.T, name, email string, owner *user_model.User) *user_model.User {
+ t.Helper()
+ org := new(organization_model.Organization)
+ org.Name = name
+ org.Language = "en_US"
+ org.IsAdmin = false
+ // contact email for the organization, for display purposes but otherwise not used as of v12
+ org.Email = email
+ org.LastLoginUnix = 1693648327
+ org.CreatedUnix = 1693648027
+ require.NoError(t, organization_model.CreateOrganization(db.DefaultContext, org, owner))
+ return (*user_model.User)(org)
+}
+
+func assertTranslatedLocaleMailActionsNowDone(t *testing.T, msgBody string) {
+ AssertTranslatedLocale(t, msgBody, "mail.actions.successful_run_after_failure", "mail.actions.not_successful_run", "mail.actions.run_info_cur_status", "mail.actions.run_info_ref", "mail.actions.run_info_previous_status", "mail.actions.run_info_trigger", "mail.view_it_on")
+}
+
+func TestActionRunNowDoneNotificationMail(t *testing.T) {
+ ctx := t.Context()
+
+ defer test.MockVariableValue(&setting.Admin.DisableRegularOrgCreation, false)()
+
+ actionsUser := user_model.NewActionsUser()
+ require.NotEmpty(t, actionsUser.Email)
+
+ repo := repo_model.Repository{
+ Name: "some repo",
+ Description: "rockets are cool",
+ }
+
+ // Do some funky stuff with the action run's ids:
+ // The run with the larger ID finished first.
+ // This is odd but something that must work.
+ run1 := &actions_model.ActionRun{ID: 2, Repo: &repo, RepoID: repo.ID, Title: "some workflow", Status: actions_model.StatusFailure, Stopped: 1745821796, TriggerEvent: "workflow_dispatch"}
+ run2 := &actions_model.ActionRun{ID: 1, Repo: &repo, RepoID: repo.ID, Title: "some workflow", Status: actions_model.StatusSuccess, Stopped: 1745822796, TriggerEvent: "push"}
+
+ assignUsers := func(triggerUser, owner *user_model.User) {
+ for _, run := range []*actions_model.ActionRun{run1, run2} {
+ run.TriggerUser = triggerUser
+ run.TriggerUserID = triggerUser.ID
+ run.NotifyEmail = true
+ }
+ repo.Owner = owner
+ repo.OwnerID = owner.ID
+ }
+
+ notify_service.RegisterNotifier(NewNotifier())
+
+ orgOwner := getActionsNowDoneTestUser(t, "org_owner", "org_owner@example.com", "disabled")
+ defer CleanUpUsers(ctx, []*user_model.User{orgOwner})
+
+ t.Run("DontSendNotificationEmailOnFirstActionSuccess", func(t *testing.T) {
+ user := getActionsNowDoneTestUser(t, "new_user", "new_user@example.com", "enabled")
+ defer CleanUpUsers(ctx, []*user_model.User{user})
+ assignUsers(user, user)
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Fail(t, "no mail should be sent")
+ })()
+ notify_service.ActionRunNowDone(ctx, run2, actions_model.StatusRunning, nil)
+ })
+
+ t.Run("WorkflowEnableEmailNotificationIsFalse", func(t *testing.T) {
+ user := getActionsNowDoneTestUser(t, "new_user1", "new_user1@example.com", "enabled")
+ defer CleanUpUsers(ctx, []*user_model.User{user})
+ assignUsers(user, user)
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Fail(t, "no mail should be sent")
+ })()
+ run2.NotifyEmail = false
+ notify_service.ActionRunNowDone(ctx, run2, actions_model.StatusRunning, nil)
+ })
+
+ for _, testCase := range []struct {
+ name string
+ triggerUser *user_model.User
+ owner *user_model.User
+ expected string
+ expectMail bool
+ }{
+ {
+ // if the action is assigned a trigger user in a repository
+ // owned by a regular user, the mail is sent to the trigger user
+ name: "RegularTriggerUser",
+ triggerUser: getActionsNowDoneTestUser(t, "new_trigger_user0", "new_trigger_user0@example.com", user_model.EmailNotificationsEnabled),
+ owner: getActionsNowDoneTestUser(t, "new_owner0", "new_owner0@example.com", user_model.EmailNotificationsEnabled),
+ expected: "trigger",
+ expectMail: true,
+ },
+ {
+ // if the action is assigned to a system user (e.g. ActionsUser)
+ // in a repository owned by a regular user, the mail is sent to
+ // the user that owns the repository
+ name: "SystemTriggerUserAndRegularOwner",
+ triggerUser: actionsUser,
+ owner: getActionsNowDoneTestUser(t, "new_owner1", "new_owner1@example.com", user_model.EmailNotificationsEnabled),
+ expected: "owner",
+ expectMail: true,
+ },
+ {
+ // if the action is assigned a trigger user with disabled notifications in a repository
+ // owned by a regular user, no mail is sent
+ name: "RegularTriggerUserNotificationsDisabled",
+ triggerUser: getActionsNowDoneTestUser(t, "new_trigger_user2", "new_trigger_user2@example.com", user_model.EmailNotificationsDisabled),
+ owner: getActionsNowDoneTestUser(t, "new_owner2", "new_owner2@example.com", user_model.EmailNotificationsEnabled),
+ expectMail: false,
+ },
+ {
+ // if the action is assigned to a system user (e.g. ActionsUser)
+ // owned by a regular user with disabled notifications, no mail is sent
+ name: "SystemTriggerUserAndRegularOwnerNotificationsDisabled",
+ triggerUser: actionsUser,
+ owner: getActionsNowDoneTestUser(t, "new_owner3", "new_owner3@example.com", user_model.EmailNotificationsDisabled),
+ expectMail: false,
+ },
+ {
+ // if the action is assigned to a system user (e.g. ActionsUser)
+ // in a repository owned by an organization with an email contact, the mail is sent to
+ // this email contact
+ name: "SystemTriggerUserAndOrgOwner",
+ triggerUser: actionsUser,
+ owner: getActionsNowDoneTestOrg(t, "new_org1", "new_org_owner0@example.com", orgOwner),
+ expected: "owner",
+ expectMail: true,
+ },
+ {
+ // if the action is assigned to a system user (e.g. ActionsUser)
+ // in a repository owned by an organization without an email contact, no mail is sent
+ name: "SystemTriggerUserAndNoMailOrgOwner",
+ triggerUser: actionsUser,
+ owner: getActionsNowDoneTestOrg(t, "new_org2", "", orgOwner),
+ expectMail: false,
+ },
+ } {
+ t.Run(testCase.name, func(t *testing.T) {
+ assignUsers(testCase.triggerUser, testCase.owner)
+ defer CleanUpUsers(ctx, slices.DeleteFunc([]*user_model.User{testCase.triggerUser, testCase.owner}, func(user *user_model.User) bool {
+ return user.IsSystem()
+ }))
+
+ t.Run("SendNotificationEmailOnActionRunFailed", func(t *testing.T) {
+ mailSent := false
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Len(t, msgs, 1)
+ msg := msgs[0]
+ assert.False(t, mailSent, "sent mail twice")
+ expectedEmail := testCase.triggerUser.Email
+ if testCase.expected == "owner" { // otherwise "trigger"
+ expectedEmail = testCase.owner.Email
+ }
+ require.Contains(t, msg.To, expectedEmail, "sent mail to unexpected recipient")
+ mailSent = true
+ assert.Contains(t, msg.Body, testCase.triggerUser.HTMLURL())
+ assert.Contains(t, msg.Body, testCase.triggerUser.Name)
+ // what happened
+ assert.Contains(t, msg.Body, "failed")
+ // new status of run
+ assert.Contains(t, msg.Body, "failure")
+ // prior status of this run
+ assert.Contains(t, msg.Body, "waiting")
+ assertTranslatedLocaleMailActionsNowDone(t, msg.Body)
+ })()
+ require.NotNil(t, setting.MailService)
+
+ notify_service.ActionRunNowDone(ctx, run1, actions_model.StatusWaiting, nil)
+ assert.Equal(t, testCase.expectMail, mailSent)
+ })
+
+ t.Run("SendNotificationEmailOnActionRunRecovered", func(t *testing.T) {
+ mailSent := false
+ defer MockMailSettings(func(msgs ...*Message) {
+ assert.Len(t, msgs, 1)
+ msg := msgs[0]
+ assert.False(t, mailSent, "sent mail twice")
+ expectedEmail := testCase.triggerUser.Email
+ if testCase.expected == "owner" { // otherwise "trigger"
+ expectedEmail = testCase.owner.Email
+ }
+ require.Contains(t, msg.To, expectedEmail, "sent mail to unexpected recipient")
+ mailSent = true
+ assert.Contains(t, msg.Body, testCase.triggerUser.HTMLURL())
+ assert.Contains(t, msg.Body, testCase.triggerUser.Name)
+ // what happened
+ assert.Contains(t, msg.Body, "recovered")
+ // old status of run
+ assert.Contains(t, msg.Body, "failure")
+ // new status of run
+ assert.Contains(t, msg.Body, "success")
+ // prior status of this run
+ assert.Contains(t, msg.Body, "running")
+ })()
+ require.NotNil(t, setting.MailService)
+
+ notify_service.ActionRunNowDone(ctx, run2, actions_model.StatusRunning, run1)
+ assert.Equal(t, testCase.expectMail, mailSent)
+ })
+ })
+ }
+}
diff --git a/services/mailer/mail_admin_new_user_test.go b/services/mailer/mail_admin_new_user_test.go
index 9273691792..58afcfcda6 100644
--- a/services/mailer/mail_admin_new_user_test.go
+++ b/services/mailer/mail_admin_new_user_test.go
@@ -4,7 +4,6 @@
package mailer
import (
- "context"
"strconv"
"testing"
@@ -17,7 +16,7 @@ import (
"github.com/stretchr/testify/require"
)
-func getTestUsers(t *testing.T) []*user_model.User {
+func getAdminNewUserTestUsers(t *testing.T) []*user_model.User {
t.Helper()
admin := new(user_model.User)
admin.Name = "testadmin"
@@ -38,16 +37,10 @@ func getTestUsers(t *testing.T) []*user_model.User {
return []*user_model.User{admin, newUser}
}
-func cleanUpUsers(ctx context.Context, users []*user_model.User) {
- for _, u := range users {
- db.DeleteByID[user_model.User](ctx, u.ID)
- }
-}
-
func TestAdminNotificationMail_test(t *testing.T) {
ctx := t.Context()
- users := getTestUsers(t)
+ users := getAdminNewUserTestUsers(t)
t.Run("SendNotificationEmailOnNewUser_true", func(t *testing.T) {
defer test.MockVariableValue(&setting.Admin.SendNotificationEmailOnNewUser, true)()
@@ -75,5 +68,5 @@ func TestAdminNotificationMail_test(t *testing.T) {
MailNewUser(ctx, users[1])
})
- cleanUpUsers(ctx, users)
+ CleanUpUsers(ctx, users)
}
diff --git a/services/mailer/mail_issue.go b/services/mailer/mail_issue.go
index b0329caa0b..0d8e054041 100644
--- a/services/mailer/mail_issue.go
+++ b/services/mailer/mail_issue.go
@@ -85,7 +85,7 @@ func mailIssueCommentToParticipants(ctx *mailCommentContext, mentions []*user_mo
// =========== Repo watchers ===========
// Make repo watchers last, since it's likely the list with the most users
- if !(ctx.Issue.IsPull && ctx.Issue.PullRequest.IsWorkInProgress(ctx) && ctx.ActionType != activities_model.ActionCreatePullRequest) {
+ if !ctx.Issue.IsPull || !ctx.Issue.PullRequest.IsWorkInProgress(ctx) || ctx.ActionType == activities_model.ActionCreatePullRequest {
ids, err = repo_model.GetRepoWatchersIDs(ctx, ctx.Issue.RepoID)
if err != nil {
return fmt.Errorf("GetRepoWatchersIDs(%d): %w", ctx.Issue.RepoID, err)
@@ -137,9 +137,8 @@ func mailIssueCommentBatch(ctx *mailCommentContext, users []*user_model.User, vi
}
// At this point we exclude:
// user that don't have all mails enabled or users only get mail on mention and this is one ...
- if !(user.EmailNotificationsPreference == user_model.EmailNotificationsEnabled ||
- user.EmailNotificationsPreference == user_model.EmailNotificationsAndYourOwn ||
- fromMention && user.EmailNotificationsPreference == user_model.EmailNotificationsOnMention) {
+ if user.EmailNotificationsPreference != user_model.EmailNotificationsEnabled &&
+ user.EmailNotificationsPreference != user_model.EmailNotificationsAndYourOwn && (!fromMention || user.EmailNotificationsPreference != user_model.EmailNotificationsOnMention) {
continue
}
diff --git a/services/mailer/mail_team_invite.go b/services/mailer/mail_team_invite.go
index a2a871d3c3..5375133415 100644
--- a/services/mailer/mail_team_invite.go
+++ b/services/mailer/mail_team_invite.go
@@ -6,6 +6,7 @@ package mailer
import (
"bytes"
"context"
+ "errors"
"fmt"
"net/url"
@@ -39,7 +40,7 @@ func MailTeamInvite(ctx context.Context, inviter *user_model.User, team *org_mod
if err != nil && !user_model.IsErrUserNotExist(err) {
return err
} else if user != nil && user.ProhibitLogin {
- return fmt.Errorf("login is prohibited for the invited user")
+ return errors.New("login is prohibited for the invited user")
}
inviteRedirect := url.QueryEscape(fmt.Sprintf("/org/invite/%s", invite.Token))
diff --git a/services/mailer/mail_test.go b/services/mailer/mail_test.go
index 616eea2d85..afbcb8064e 100644
--- a/services/mailer/mail_test.go
+++ b/services/mailer/mail_test.go
@@ -494,8 +494,7 @@ func Test_createReference(t *testing.T) {
func TestFromDisplayName(t *testing.T) {
template, err := texttmpl.New("mailFrom").Parse("{{ .DisplayName }}")
require.NoError(t, err)
- setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
- defer func() { setting.MailService = nil }()
+ defer test.MockVariableValue(&setting.MailService, &setting.Mailer{FromDisplayNameFormatTemplate: template})()
tests := []struct {
userDisplayName string
@@ -518,24 +517,18 @@ func TestFromDisplayName(t *testing.T) {
t.Run(tc.userDisplayName, func(t *testing.T) {
user := &user_model.User{FullName: tc.userDisplayName, Name: "tmp"}
got := fromDisplayName(user)
- assert.EqualValues(t, tc.fromDisplayName, got)
+ assert.Equal(t, tc.fromDisplayName, got)
})
}
t.Run("template with all available vars", func(t *testing.T) {
template, err = texttmpl.New("mailFrom").Parse("{{ .DisplayName }} (by {{ .AppName }} on [{{ .Domain }}])")
require.NoError(t, err)
- setting.MailService = &setting.Mailer{FromDisplayNameFormatTemplate: template}
- oldAppName := setting.AppName
- setting.AppName = "Code IT"
- oldDomain := setting.Domain
- setting.Domain = "code.it"
- defer func() {
- setting.AppName = oldAppName
- setting.Domain = oldDomain
- }()
+ defer test.MockVariableValue(&setting.MailService, &setting.Mailer{FromDisplayNameFormatTemplate: template})()
+ defer test.MockVariableValue(&setting.AppName, "Code IT")()
+ defer test.MockVariableValue(&setting.Domain, "code.it")()
- assert.EqualValues(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"}))
+ assert.Equal(t, "Mister X (by Code IT on [code.it])", fromDisplayName(&user_model.User{FullName: "Mister X", Name: "tmp"}))
})
}
diff --git a/services/mailer/mailer.go b/services/mailer/mailer.go
index 4561240df5..d8646d9ddd 100644
--- a/services/mailer/mailer.go
+++ b/services/mailer/mailer.go
@@ -8,6 +8,7 @@ import (
"bytes"
"context"
"crypto/tls"
+ "errors"
"fmt"
"hash/fnv"
"io"
@@ -28,7 +29,7 @@ import (
notify_service "forgejo.org/services/notify"
ntlmssp "github.com/Azure/go-ntlmssp"
- "github.com/jaytaylor/html2text"
+ "github.com/inbucket/html2text"
"gopkg.in/gomail.v2"
)
@@ -176,7 +177,7 @@ func (a *ntlmAuth) Start(server *smtp.ServerInfo) (string, []byte, error) {
func (a *ntlmAuth) Next(fromServer []byte, more bool) ([]byte, error) {
if more {
if len(fromServer) == 0 {
- return nil, fmt.Errorf("ntlm ChallengeMessage is empty")
+ return nil, errors.New("ntlm ChallengeMessage is empty")
}
authenticateMessage, err := ntlmssp.ProcessChallenge(fromServer, a.username, a.password, a.domainNeeded)
return authenticateMessage, err
@@ -264,7 +265,7 @@ func (s *smtpSender) Send(from string, to []string, msg io.WriterTo) error {
canAuth, options := client.Extension("AUTH")
if len(opts.User) > 0 {
if !canAuth {
- return fmt.Errorf("SMTP server does not support AUTH, but credentials provided")
+ return errors.New("SMTP server does not support AUTH, but credentials provided")
}
var auth smtp.Auth
diff --git a/services/mailer/mailer_test.go b/services/mailer/mailer_test.go
index aef242d908..34fd847c05 100644
--- a/services/mailer/mailer_test.go
+++ b/services/mailer/mailer_test.go
@@ -72,7 +72,7 @@ func TestToMessage(t *testing.T) {
_, err := m1.ToMessage().WriteTo(buf)
require.NoError(t, err)
header, _ := extractMailHeaderAndContent(t, buf.String())
- assert.EqualValues(t, map[string]string{
+ assert.Equal(t, map[string]string{
"Content-Type": "multipart/alternative;",
"Date": "Mon, 01 Jan 0001 00:00:00 +0000",
"From": "\"Test Gitea\" ",
@@ -92,7 +92,7 @@ func TestToMessage(t *testing.T) {
_, err = m1.ToMessage().WriteTo(buf)
require.NoError(t, err)
header, _ = extractMailHeaderAndContent(t, buf.String())
- assert.EqualValues(t, map[string]string{
+ assert.Equal(t, map[string]string{
"Content-Type": "multipart/alternative;",
"Date": "Mon, 01 Jan 0001 00:00:00 +0000",
"From": "\"Test Gitea\" ",
diff --git a/services/mailer/main_test.go b/services/mailer/main_test.go
index 9ef71dbdb3..5e9cbe3e99 100644
--- a/services/mailer/main_test.go
+++ b/services/mailer/main_test.go
@@ -7,7 +7,10 @@ import (
"context"
"testing"
+ "forgejo.org/models/db"
+ organization_model "forgejo.org/models/organization"
"forgejo.org/models/unittest"
+ user_model "forgejo.org/models/user"
"forgejo.org/modules/setting"
"forgejo.org/modules/templates"
"forgejo.org/modules/test"
@@ -46,3 +49,14 @@ func MockMailSettings(send func(msgs ...*Message)) func() {
}
}
}
+
+func CleanUpUsers(ctx context.Context, users []*user_model.User) {
+ for _, u := range users {
+ if u.IsOrganization() {
+ organization_model.DeleteOrganization(ctx, (*organization_model.Organization)(u))
+ } else {
+ db.DeleteByID[user_model.User](ctx, u.ID)
+ db.DeleteByBean(ctx, &user_model.EmailAddress{UID: u.ID})
+ }
+ }
+}
diff --git a/services/mailer/notify.go b/services/mailer/notify.go
index e61ecd0511..7461a67181 100644
--- a/services/mailer/notify.go
+++ b/services/mailer/notify.go
@@ -7,6 +7,7 @@ import (
"context"
"fmt"
+ actions_model "forgejo.org/models/actions"
activities_model "forgejo.org/models/activities"
issues_model "forgejo.org/models/issues"
repo_model "forgejo.org/models/repo"
@@ -30,15 +31,16 @@ func (m *mailNotifier) CreateIssueComment(ctx context.Context, doer *user_model.
issue *issues_model.Issue, comment *issues_model.Comment, mentions []*user_model.User,
) {
var act activities_model.ActionType
- if comment.Type == issues_model.CommentTypeClose {
+ switch comment.Type {
+ case issues_model.CommentTypeClose:
act = activities_model.ActionCloseIssue
- } else if comment.Type == issues_model.CommentTypeReopen {
+ case issues_model.CommentTypeReopen:
act = activities_model.ActionReopenIssue
- } else if comment.Type == issues_model.CommentTypeComment {
+ case issues_model.CommentTypeComment:
act = activities_model.ActionCommentIssue
- } else if comment.Type == issues_model.CommentTypeCode {
+ case issues_model.CommentTypeCode:
act = activities_model.ActionCommentIssue
- } else if comment.Type == issues_model.CommentTypePullRequestPush {
+ case issues_model.CommentTypePullRequestPush:
act = 0
}
@@ -94,11 +96,12 @@ func (m *mailNotifier) NewPullRequest(ctx context.Context, pr *issues_model.Pull
func (m *mailNotifier) PullRequestReview(ctx context.Context, pr *issues_model.PullRequest, r *issues_model.Review, comment *issues_model.Comment, mentions []*user_model.User) {
var act activities_model.ActionType
- if comment.Type == issues_model.CommentTypeClose {
+ switch comment.Type {
+ case issues_model.CommentTypeClose:
act = activities_model.ActionCloseIssue
- } else if comment.Type == issues_model.CommentTypeReopen {
+ case issues_model.CommentTypeReopen:
act = activities_model.ActionReopenIssue
- } else if comment.Type == issues_model.CommentTypeComment {
+ case issues_model.CommentTypeComment:
act = activities_model.ActionCommentPull
}
if err := MailParticipantsComment(ctx, comment, act, pr.Issue, mentions); err != nil {
@@ -206,3 +209,13 @@ func (m *mailNotifier) RepoPendingTransfer(ctx context.Context, doer, newOwner *
func (m *mailNotifier) NewUserSignUp(ctx context.Context, newUser *user_model.User) {
MailNewUser(ctx, newUser)
}
+
+func (m *mailNotifier) ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) {
+ // Only send a mail on a successful run when the workflow recovered (i.e., the run before failed).
+ if run.Status.IsSuccess() && (lastRun == nil || lastRun.Status.IsSuccess()) {
+ return
+ }
+ if err := MailActionRun(run, priorStatus, lastRun); err != nil {
+ log.Error("MailActionRunNowDone: %v", err)
+ }
+}
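
The guard above means ActionRunNowDone only forwards runs that failed, or that succeeded right after a failed run (a recovery); MailActionRun earlier in this patch then applies the per-user checks (NotifyEmail flag, e-mail preference). A small, self-contained sketch of the recovery guard, illustrative only and not part of the patch:

package main

import "fmt"

type status int

const (
	success status = iota
	failure
)

func (s status) IsSuccess() bool { return s == success }

// wouldNotify mirrors the early return in ActionRunNowDone above.
func wouldNotify(run status, lastRun *status) bool {
	if run.IsSuccess() && (lastRun == nil || lastRun.IsSuccess()) {
		return false // still green: nothing to report
	}
	return true // a failure, or a success right after a failure (recovered)
}

func main() {
	prev := failure
	fmt.Println(wouldNotify(failure, nil))   // true: the run failed
	fmt.Println(wouldNotify(success, nil))   // false: first run, nothing recovered
	fmt.Println(wouldNotify(success, &prev)) // true: recovered after a failure
}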
diff --git a/services/markup/processorhelper.go b/services/markup/processorhelper.go
index b5fcd78cb7..2f1b1e738c 100644
--- a/services/markup/processorhelper.go
+++ b/services/markup/processorhelper.go
@@ -5,7 +5,7 @@ package markup
import (
"context"
- "fmt"
+ "errors"
"forgejo.org/models/perm/access"
"forgejo.org/models/repo"
@@ -55,7 +55,7 @@ func ProcessorHelper() *markup.ProcessorHelper {
return nil, err
}
if !perms.CanRead(unit.TypeCode) {
- return nil, fmt.Errorf("cannot access repository code")
+ return nil, errors.New("cannot access repository code")
}
gitRepo, err := gitrepo.OpenRepository(ctx, repo)
diff --git a/services/migrations/gitea_downloader_test.go b/services/migrations/gitea_downloader_test.go
index 24c53af023..5acc3b86a9 100644
--- a/services/migrations/gitea_downloader_test.go
+++ b/services/migrations/gitea_downloader_test.go
@@ -45,7 +45,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
topics, err := downloader.GetTopics()
require.NoError(t, err)
sort.Strings(topics)
- assert.EqualValues(t, []string{"ci", "gitea", "migration", "test"}, topics)
+ assert.Equal(t, []string{"ci", "gitea", "migration", "test"}, topics)
labels, err := downloader.GetLabels()
require.NoError(t, err)
@@ -132,7 +132,7 @@ func TestGiteaDownloadRepo(t *testing.T) {
require.NoError(t, err)
assert.True(t, isEnd)
assert.Len(t, issues, 7)
- assert.EqualValues(t, "open", issues[0].State)
+ assert.Equal(t, "open", issues[0].State)
issues, isEnd, err = downloader.GetIssues(3, 2)
require.NoError(t, err)
diff --git a/services/migrations/gitea_uploader.go b/services/migrations/gitea_uploader.go
index 55adad9685..7887dacdb1 100644
--- a/services/migrations/gitea_uploader.go
+++ b/services/migrations/gitea_uploader.go
@@ -766,7 +766,7 @@ func (g *GiteaLocalUploader) newPullRequest(pr *base.PullRequest) (*issues_model
issue := issues_model.Issue{
RepoID: g.repo.ID,
Repo: g.repo,
- Title: prTitle,
+ Title: util.TruncateRunes(prTitle, 255),
Index: pr.Number,
Content: pr.Content,
MilestoneID: milestoneID,
diff --git a/services/migrations/gitea_uploader_test.go b/services/migrations/gitea_uploader_test.go
index e07c621acc..85e733cc51 100644
--- a/services/migrations/gitea_uploader_test.go
+++ b/services/migrations/gitea_uploader_test.go
@@ -64,7 +64,7 @@ func TestGiteaUploadRepo(t *testing.T) {
repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, Name: repoName})
assert.True(t, repo.HasWiki())
- assert.EqualValues(t, repo_model.RepositoryReady, repo.Status)
+ assert.Equal(t, repo_model.RepositoryReady, repo.Status)
milestones, err := db.Find[issues_model.Milestone](db.DefaultContext, issues_model.FindMilestoneOptions{
RepoID: repo.ID,
@@ -173,7 +173,7 @@ func TestGiteaUploadRemapLocalUser(t *testing.T) {
uploader.userMap = make(map[int64]int64)
err = uploader.remapUser(&source, &target)
require.NoError(t, err)
- assert.EqualValues(t, user.ID, target.GetUserID())
+ assert.Equal(t, user.ID, target.GetUserID())
}
func TestGiteaUploadRemapExternalUser(t *testing.T) {
@@ -224,7 +224,7 @@ func TestGiteaUploadRemapExternalUser(t *testing.T) {
target = repo_model.Release{}
err = uploader.remapUser(&source, &target)
require.NoError(t, err)
- assert.EqualValues(t, linkedUser.ID, target.GetUserID())
+ assert.Equal(t, linkedUser.ID, target.GetUserID())
}
func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
@@ -504,14 +504,14 @@ func TestGiteaUploadUpdateGitForPullRequest(t *testing.T) {
head, err := uploader.updateGitForPullRequest(&testCase.pr)
require.NoError(t, err)
- assert.EqualValues(t, testCase.head, head)
+ assert.Equal(t, testCase.head, head)
log.Info(stopMark)
logFiltered, logStopped := logChecker.Check(5 * time.Second)
assert.True(t, logStopped)
if len(testCase.logFilter) > 0 {
- assert.EqualValues(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter)
+ assert.Equal(t, testCase.logFiltered, logFiltered, "for log message filters: %v", testCase.logFilter)
}
})
}
diff --git a/services/migrations/github.go b/services/migrations/github.go
index 5052a68114..9721c86180 100644
--- a/services/migrations/github.go
+++ b/services/migrations/github.go
@@ -140,7 +140,7 @@ func (g *GithubDownloaderV3) LogString() string {
func (g *GithubDownloaderV3) addClient(client *http.Client, baseURL string) {
githubClient := github.NewClient(client)
if baseURL != "https://github.com" {
- githubClient, _ = github.NewClient(client).WithEnterpriseURLs(baseURL, baseURL)
+ githubClient, _ = githubClient.WithEnterpriseURLs(baseURL, baseURL)
}
g.clients = append(g.clients, githubClient)
g.rates = append(g.rates, nil)
@@ -885,3 +885,18 @@ func (g *GithubDownloaderV3) GetReviews(reviewable base.Reviewable) ([]*base.Rev
}
return allReviews, nil
}
+
+// FormatCloneURL add authentication into remote URLs
+func (g *GithubDownloaderV3) FormatCloneURL(opts MigrateOptions, remoteAddr string) (string, error) {
+ u, err := url.Parse(remoteAddr)
+ if err != nil {
+ return "", err
+ }
+ if len(opts.AuthToken) > 0 {
+ // Multiple tokens may be supplied to gain more API rate limit quota;
+ // git clone doesn't count for rate limits, so only use the first token.
+ // source: https://github.com/orgs/community/discussions/44515
+ u.User = url.UserPassword("oauth2", strings.Split(opts.AuthToken, ",")[0])
+ }
+ return u.String(), nil
+}
diff --git a/services/migrations/github_test.go b/services/migrations/github_test.go
index b1f20c4716..c5e24ebbcd 100644
--- a/services/migrations/github_test.go
+++ b/services/migrations/github_test.go
@@ -433,3 +433,36 @@ func TestGitHubDownloadRepo(t *testing.T) {
},
}, reviews)
}
+
+func TestGithubMultiToken(t *testing.T) {
+ testCases := []struct {
+ desc string
+ token string
+ expectedCloneURL string
+ }{
+ {
+ desc: "Single Token",
+ token: "single_token",
+ expectedCloneURL: "https://oauth2:single_token@github.com",
+ },
+ {
+ desc: "Multi Token",
+ token: "token1,token2",
+ expectedCloneURL: "https://oauth2:token1@github.com",
+ },
+ }
+ factory := GithubDownloaderV3Factory{}
+
+ for _, tC := range testCases {
+ t.Run(tC.desc, func(t *testing.T) {
+ opts := base.MigrateOptions{CloneAddr: "https://github.com/go-gitea/gitea", AuthToken: tC.token}
+ client, err := factory.New(t.Context(), opts)
+ require.NoError(t, err)
+
+ cloneURL, err := client.FormatCloneURL(opts, "https://github.com")
+ require.NoError(t, err)
+
+ assert.Equal(t, tC.expectedCloneURL, cloneURL)
+ })
+ }
+}
diff --git a/services/migrations/gitlab.go b/services/migrations/gitlab.go
index ac0d3bcf7a..f54f682c47 100644
--- a/services/migrations/gitlab.go
+++ b/services/migrations/gitlab.go
@@ -99,6 +99,7 @@ func NewGitlabDownloader(ctx context.Context, baseURL, repoPath, username, passw
// Only use basic auth if token is blank and password is NOT
// Basic auth will fail with empty strings, but empty token will allow anonymous public API usage
if token == "" && password != "" {
+ //nolint // SA1019 gitlab.NewBasicAuthClient is deprecated: GitLab recommends against using this authentication method
gitlabClient, err = gitlab.NewBasicAuthClient(username, password, gitlab.WithBaseURL(baseURL), gitlab.WithHTTPClient(NewMigrationHTTPClient()))
}
@@ -213,7 +214,7 @@ func (g *GitlabDownloader) GetTopics() ([]string, error) {
if err != nil {
return nil, err
}
- return gr.TagList, err
+ return gr.Topics, err
}
// GetMilestones returns milestones
diff --git a/services/migrations/gitlab_test.go b/services/migrations/gitlab_test.go
index 924dab5144..30b24f09e8 100644
--- a/services/migrations/gitlab_test.go
+++ b/services/migrations/gitlab_test.go
@@ -49,7 +49,7 @@ func TestGitlabDownloadRepo(t *testing.T) {
topics, err := downloader.GetTopics()
require.NoError(t, err)
assert.Len(t, topics, 2)
- assert.EqualValues(t, []string{"migration", "test"}, topics)
+ assert.Equal(t, []string{"migration", "test"}, topics)
milestones, err := downloader.GetMilestones()
require.NoError(t, err)
@@ -352,7 +352,7 @@ func TestGitlabSkippedIssueNumber(t *testing.T) {
// the only issue in this repository has number 2
assert.Len(t, issues, 1)
assert.EqualValues(t, 2, issues[0].Number)
- assert.EqualValues(t, "vpn unlimited errors", issues[0].Title)
+ assert.Equal(t, "vpn unlimited errors", issues[0].Title)
prs, _, err := downloader.GetPullRequests(1, 10)
require.NoError(t, err)
@@ -361,7 +361,7 @@ func TestGitlabSkippedIssueNumber(t *testing.T) {
// pull request 3 in Forgejo
assert.Len(t, prs, 1)
assert.EqualValues(t, 3, prs[0].Number)
- assert.EqualValues(t, "Review", prs[0].Title)
+ assert.Equal(t, "Review", prs[0].Title)
}
func gitlabClientMockSetup(t *testing.T) (*http.ServeMux, *httptest.Server, *gitlab.Client) {
@@ -531,7 +531,7 @@ func TestAwardsToReactions(t *testing.T) {
require.NoError(t, json.Unmarshal([]byte(testResponse), &awards))
reactions := downloader.awardsToReactions(awards)
- assert.EqualValues(t, []*base.Reaction{
+ assert.Equal(t, []*base.Reaction{
{
UserName: "lafriks",
UserID: 1241334,
@@ -623,7 +623,7 @@ func TestNoteToComment(t *testing.T) {
for i, note := range notes {
actualComment := *downloader.convertNoteToComment(17, ¬e)
- assert.EqualValues(t, actualComment, comments[i])
+ assert.Equal(t, actualComment, comments[i])
}
}
diff --git a/services/migrations/gogs_test.go b/services/migrations/gogs_test.go
index 7d7f10c2b9..bf0d063ca4 100644
--- a/services/migrations/gogs_test.go
+++ b/services/migrations/gogs_test.go
@@ -25,7 +25,7 @@ func TestGogsDownloadRepo(t *testing.T) {
resp, err := http.Get("https://try.gogs.io/lunnytest/TESTREPO")
if err != nil || resp.StatusCode/100 != 2 {
// skip and don't run test
- t.Skipf("visit test repo failed, ignored")
+ t.Skip("visit test repo failed, ignored")
return
}
@@ -215,9 +215,9 @@ func TestGogsDownloaderFactory_New(t *testing.T) {
}
assert.IsType(t, &GogsDownloader{}, got)
- assert.EqualValues(t, tt.baseURL, got.(*GogsDownloader).baseURL)
- assert.EqualValues(t, tt.repoOwner, got.(*GogsDownloader).repoOwner)
- assert.EqualValues(t, tt.repoName, got.(*GogsDownloader).repoName)
+ assert.Equal(t, tt.baseURL, got.(*GogsDownloader).baseURL)
+ assert.Equal(t, tt.repoOwner, got.(*GogsDownloader).repoOwner)
+ assert.Equal(t, tt.repoName, got.(*GogsDownloader).repoName)
})
}
}
diff --git a/services/migrations/migrate.go b/services/migrations/migrate.go
index 81d1c203fe..61630d9c6d 100644
--- a/services/migrations/migrate.go
+++ b/services/migrations/migrate.go
@@ -6,6 +6,7 @@ package migrations
import (
"context"
+ "errors"
"fmt"
"net"
"net/url"
@@ -227,7 +228,7 @@ func migrateRepository(_ context.Context, doer *user_model.User, downloader base
if cloneURL.Scheme == "file" || cloneURL.Scheme == "" {
if cloneAddrURL.Scheme != "file" && cloneAddrURL.Scheme != "" {
- return fmt.Errorf("repo info has changed from external to local filesystem")
+ return errors.New("repo info has changed from external to local filesystem")
}
}
diff --git a/services/mirror/mirror.go b/services/mirror/mirror.go
index 6d871ad5ff..514b7c3969 100644
--- a/services/mirror/mirror.go
+++ b/services/mirror/mirror.go
@@ -5,7 +5,7 @@ package mirror
import (
"context"
- "fmt"
+ "errors"
quota_model "forgejo.org/models/quota"
repo_model "forgejo.org/models/repo"
@@ -31,7 +31,7 @@ func doMirrorSync(ctx context.Context, req *SyncRequest) {
}
}
-var errLimit = fmt.Errorf("reached limit")
+var errLimit = errors.New("reached limit")
// Update checks and updates mirror repositories.
func Update(ctx context.Context, pullLimit, pushLimit int) error {
@@ -70,7 +70,7 @@ func Update(ctx context.Context, pullLimit, pushLimit int) error {
// Check we've not been cancelled
select {
case <-ctx.Done():
- return fmt.Errorf("aborted")
+ return errors.New("aborted")
default:
}
diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go
index a63cbcf40c..c46323f283 100644
--- a/services/mirror/mirror_pull.go
+++ b/services/mirror/mirror_pull.go
@@ -244,6 +244,24 @@ func pruneBrokenReferences(ctx context.Context,
return pruneErr
}
+// checkRecoverableSyncError takes the stderr output of a git fetch command and returns true if the error is likely recoverable (e.g. a transient race condition) and false if it should be treated as a fatal/blocking error
+func checkRecoverableSyncError(stderrMessage string) bool {
+ switch {
+ case strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken"):
+ return true
+ case strings.Contains(stderrMessage, "remote error") && strings.Contains(stderrMessage, "not our ref"):
+ return true
+ case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "but expected"):
+ return true
+ case strings.Contains(stderrMessage, "cannot lock ref") && strings.Contains(stderrMessage, "unable to resolve reference"):
+ return true
+ case strings.Contains(stderrMessage, "Unable to create") && strings.Contains(stderrMessage, ".lock"):
+ return true
+ default:
+ return false
+ }
+}
+
// runSync returns true if sync finished without error.
func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bool) {
repoPath := m.Repo.RepoPath()
@@ -286,7 +304,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stdoutMessage := util.SanitizeCredentialURLs(stdout)
// Now check if the error is a resolve reference due to broken reference
- if strings.Contains(stderr, "unable to resolve reference") && strings.Contains(stderr, "reference broken") {
+ if checkRecoverableSyncError(stderr) {
log.Warn("SyncMirrors [repo: %-v]: failed to update mirror repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
err = nil
@@ -337,6 +355,15 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
return nil, false
}
+ if m.LFS && setting.LFS.StartServer {
+ log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
+ endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
+ lfsClient := lfs.NewClient(endpoint, nil)
+ if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
+ log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err)
+ }
+ }
+
log.Trace("SyncMirrors [repo: %-v]: syncing branches...", m.Repo)
if _, err = repo_module.SyncRepoBranchesWithRepo(ctx, m.Repo, gitRepo, 0); err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize branches: %v", m.Repo, err)
@@ -346,15 +373,6 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
if err = repo_module.SyncReleasesWithTags(ctx, m.Repo, gitRepo); err != nil {
log.Error("SyncMirrors [repo: %-v]: failed to synchronize tags to releases: %v", m.Repo, err)
}
-
- if m.LFS && setting.LFS.StartServer {
- log.Trace("SyncMirrors [repo: %-v]: syncing LFS objects...", m.Repo)
- endpoint := lfs.DetermineEndpoint(remoteURL.String(), m.LFSEndpoint)
- lfsClient := lfs.NewClient(endpoint, nil)
- if err = repo_module.StoreMissingLfsObjectsInRepository(ctx, m.Repo, gitRepo, lfsClient); err != nil {
- log.Error("SyncMirrors [repo: %-v]: failed to synchronize LFS objects for repository: %v", m.Repo, err)
- }
- }
gitRepo.Close()
log.Trace("SyncMirrors [repo: %-v]: updating size of repository", m.Repo)
@@ -382,7 +400,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo
stdoutMessage := util.SanitizeCredentialURLs(stdout)
// Now check if the error is a resolve reference due to broken reference
- if strings.Contains(stderrMessage, "unable to resolve reference") && strings.Contains(stderrMessage, "reference broken") {
+ if checkRecoverableSyncError(stderrMessage) {
log.Warn("SyncMirrors [repo: %-v Wiki]: failed to update mirror wiki repository due to broken references:\nStdout: %s\nStderr: %s\nErr: %v\nAttempting Prune", m.Repo, stdoutMessage, stderrMessage, err)
err = nil
diff --git a/services/mirror/mirror_pull_test.go b/services/mirror/mirror_pull_test.go
new file mode 100644
index 0000000000..97859be5b0
--- /dev/null
+++ b/services/mirror/mirror_pull_test.go
@@ -0,0 +1,94 @@
+// Copyright 2023 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package mirror
+
+import (
+ "testing"
+
+ "github.com/stretchr/testify/assert"
+)
+
+func Test_parseRemoteUpdateOutput(t *testing.T) {
+ output := `
+ * [new tag] v0.1.8 -> v0.1.8
+ * [new branch] master -> origin/master
+ - [deleted] (none) -> origin/test1
+ - [deleted] (none) -> tag1
+ + f895a1e...957a993 test2 -> origin/test2 (forced update)
+ 957a993..a87ba5f test3 -> origin/test3
+ * [new ref] refs/pull/26595/head -> refs/pull/26595/head
+ * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge
+ e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head
+ + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update)
+`
+ results := parseRemoteUpdateOutput(output, "origin")
+ assert.Len(t, results, 10)
+ assert.Equal(t, "refs/tags/v0.1.8", results[0].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[0].oldCommitID)
+ assert.Empty(t, results[0].newCommitID)
+
+ assert.Equal(t, "refs/heads/master", results[1].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[1].oldCommitID)
+ assert.Empty(t, results[1].newCommitID)
+
+ assert.Equal(t, "refs/heads/test1", results[2].refName.String())
+ assert.Empty(t, results[2].oldCommitID)
+ assert.Equal(t, gitShortEmptySha, results[2].newCommitID)
+
+ assert.Equal(t, "refs/tags/tag1", results[3].refName.String())
+ assert.Empty(t, results[3].oldCommitID)
+ assert.Equal(t, gitShortEmptySha, results[3].newCommitID)
+
+ assert.Equal(t, "refs/heads/test2", results[4].refName.String())
+ assert.Equal(t, "f895a1e", results[4].oldCommitID)
+ assert.Equal(t, "957a993", results[4].newCommitID)
+
+ assert.Equal(t, "refs/heads/test3", results[5].refName.String())
+ assert.Equal(t, "957a993", results[5].oldCommitID)
+ assert.Equal(t, "a87ba5f", results[5].newCommitID)
+
+ assert.Equal(t, "refs/pull/26595/head", results[6].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[6].oldCommitID)
+ assert.Empty(t, results[6].newCommitID)
+
+ assert.Equal(t, "refs/pull/26595/merge", results[7].refName.String())
+ assert.Equal(t, gitShortEmptySha, results[7].oldCommitID)
+ assert.Empty(t, results[7].newCommitID)
+
+ assert.Equal(t, "refs/pull/25873/head", results[8].refName.String())
+ assert.Equal(t, "e0639e38fb", results[8].oldCommitID)
+ assert.Equal(t, "6db2410489", results[8].newCommitID)
+
+ assert.Equal(t, "refs/pull/25873/merge", results[9].refName.String())
+ assert.Equal(t, "1c97ebc746", results[9].oldCommitID)
+ assert.Equal(t, "976d27d52f", results[9].newCommitID)
+}
+
+func Test_checkRecoverableSyncError(t *testing.T) {
+ cases := []struct {
+ recoverable bool
+ message string
+ }{
+ // A race condition in http git-fetch where certain refs were listed on the remote and are no longer there, would exit status 128
+ {true, "fatal: remote error: upload-pack: not our ref 988881adc9fc3655077dc2d4d757d480b5ea0e11"},
+ // A race condition where a local gc/prune removes a named ref during a git-fetch would exit status 1
+ {true, "cannot lock ref 'refs/pull/123456/merge': unable to resolve reference 'refs/pull/134153/merge'"},
+ // A race condition in http git-fetch where named refs were listed on the remote and are no longer there
+ {true, "error: cannot lock ref 'refs/remotes/origin/foo': unable to resolve reference 'refs/remotes/origin/foo': reference broken"},
+ // A race condition in http git-fetch where named refs were force-pushed during the update, would exit status 128
+ {true, "error: cannot lock ref 'refs/pull/123456/merge': is at 988881adc9fc3655077dc2d4d757d480b5ea0e11 but expected 7f894307ffc9553edbd0b671cab829786866f7b2"},
+ // A race condition with other local git operations, such as git-maintenance, would exit status 128 (note the uppercase "U" in "Unable")
+ {true, "fatal: Unable to create '/data/gitea-repositories/foo-org/bar-repo.git/./objects/info/commit-graphs/commit-graph-chain.lock': File exists."},
+ // Missing or unauthorized credentials, would exit status 128
+ {false, "fatal: Authentication failed for 'https://example.com/foo-does-not-exist/bar.git/'"},
+ // A non-existent remote repository, would exit status 128
+ {false, "fatal: Could not read from remote repository."},
+ // A non-functioning proxy, would exit status 128
+ {false, "fatal: unable to access 'https://example.com/foo-does-not-exist/bar.git/': Failed to connect to configured-https-proxy port 1080 after 0 ms: Couldn't connect to server"},
+ }
+
+ for _, c := range cases {
+ assert.Equal(t, c.recoverable, checkRecoverableSyncError(c.message), "test case: %s", c.message)
+ }
+}
diff --git a/services/mirror/mirror_test.go b/services/mirror/mirror_test.go
deleted file mode 100644
index 76632b6872..0000000000
--- a/services/mirror/mirror_test.go
+++ /dev/null
@@ -1,66 +0,0 @@
-// Copyright 2023 The Gitea Authors. All rights reserved.
-// SPDX-License-Identifier: MIT
-
-package mirror
-
-import (
- "testing"
-
- "github.com/stretchr/testify/assert"
-)
-
-func Test_parseRemoteUpdateOutput(t *testing.T) {
- output := `
- * [new tag] v0.1.8 -> v0.1.8
- * [new branch] master -> origin/master
- - [deleted] (none) -> origin/test1
- - [deleted] (none) -> tag1
- + f895a1e...957a993 test2 -> origin/test2 (forced update)
- 957a993..a87ba5f test3 -> origin/test3
- * [new ref] refs/pull/26595/head -> refs/pull/26595/head
- * [new ref] refs/pull/26595/merge -> refs/pull/26595/merge
- e0639e38fb..6db2410489 refs/pull/25873/head -> refs/pull/25873/head
- + 1c97ebc746...976d27d52f refs/pull/25873/merge -> refs/pull/25873/merge (forced update)
-`
- results := parseRemoteUpdateOutput(output, "origin")
- assert.Len(t, results, 10)
- assert.EqualValues(t, "refs/tags/v0.1.8", results[0].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[0].oldCommitID)
- assert.EqualValues(t, "", results[0].newCommitID)
-
- assert.EqualValues(t, "refs/heads/master", results[1].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[1].oldCommitID)
- assert.EqualValues(t, "", results[1].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test1", results[2].refName.String())
- assert.EqualValues(t, "", results[2].oldCommitID)
- assert.EqualValues(t, gitShortEmptySha, results[2].newCommitID)
-
- assert.EqualValues(t, "refs/tags/tag1", results[3].refName.String())
- assert.EqualValues(t, "", results[3].oldCommitID)
- assert.EqualValues(t, gitShortEmptySha, results[3].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test2", results[4].refName.String())
- assert.EqualValues(t, "f895a1e", results[4].oldCommitID)
- assert.EqualValues(t, "957a993", results[4].newCommitID)
-
- assert.EqualValues(t, "refs/heads/test3", results[5].refName.String())
- assert.EqualValues(t, "957a993", results[5].oldCommitID)
- assert.EqualValues(t, "a87ba5f", results[5].newCommitID)
-
- assert.EqualValues(t, "refs/pull/26595/head", results[6].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[6].oldCommitID)
- assert.EqualValues(t, "", results[6].newCommitID)
-
- assert.EqualValues(t, "refs/pull/26595/merge", results[7].refName.String())
- assert.EqualValues(t, gitShortEmptySha, results[7].oldCommitID)
- assert.EqualValues(t, "", results[7].newCommitID)
-
- assert.EqualValues(t, "refs/pull/25873/head", results[8].refName.String())
- assert.EqualValues(t, "e0639e38fb", results[8].oldCommitID)
- assert.EqualValues(t, "6db2410489", results[8].newCommitID)
-
- assert.EqualValues(t, "refs/pull/25873/merge", results[9].refName.String())
- assert.EqualValues(t, "1c97ebc746", results[9].oldCommitID)
- assert.EqualValues(t, "976d27d52f", results[9].newCommitID)
-}
diff --git a/services/moderation/reporting.go b/services/moderation/reporting.go
new file mode 100644
index 0000000000..e01156dc11
--- /dev/null
+++ b/services/moderation/reporting.go
@@ -0,0 +1,129 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package moderation
+
+import (
+ "errors"
+
+ "forgejo.org/models/issues"
+ "forgejo.org/models/moderation"
+ "forgejo.org/models/perm"
+ access_model "forgejo.org/models/perm/access"
+ repo_model "forgejo.org/models/repo"
+ "forgejo.org/models/unit"
+ "forgejo.org/models/user"
+ "forgejo.org/modules/log"
+ "forgejo.org/services/context"
+)
+
+var (
+ ErrContentDoesNotExist = errors.New("the content to be reported does not exist")
+ ErrDoerNotAllowed = errors.New("doer not allowed to access the content to be reported")
+)
+
+// CanReport checks if doer has access to the content they are reporting
+// (user, organization, repository, issue, pull request or comment).
+// When reporting repositories the user should have at least read access to any repo unit type.
+// When reporting issues, pull requests or comments the user should have at least read access to the
+// 'TypeIssues' unit (or the 'TypePullRequests' unit for pull requests) of the repository where the content belongs.
+// When reporting users or organizations doer should be able to view the reported entity.
+func CanReport(ctx context.Context, doer *user.User, contentType moderation.ReportedContentType, contentID int64) (bool, error) {
+ hasAccess := false
+ var issueID int64
+ var repoID int64
+ unitType := unit.TypeInvalid // used when checking access for issues, pull requests or comments
+
+ if contentType == moderation.ReportedContentTypeUser {
+ reportedUser, err := user.GetUserByID(ctx, contentID)
+ if err != nil {
+ if user.IsErrUserNotExist(err) {
+ log.Warn("User #%d wanted to report user #%d but it does not exist.", doer.ID, contentID)
+ return false, ErrContentDoesNotExist
+ }
+ return false, err
+ }
+
+ hasAccess = user.IsUserVisibleToViewer(ctx, reportedUser, ctx.Doer)
+ if !hasAccess {
+ log.Warn("User #%d wanted to report user/org #%d but they are not able to see that profile.", doer.ID, contentID)
+ return false, ErrDoerNotAllowed
+ }
+ } else {
+ // for comments and issues/pulls we need to get the parent repository
+ switch contentType {
+ case moderation.ReportedContentTypeComment:
+ comment, err := issues.GetCommentByID(ctx, contentID)
+ if err != nil {
+ if issues.IsErrCommentNotExist(err) {
+ log.Warn("User #%d wanted to report comment #%d but it does not exist.", doer.ID, contentID)
+ return false, ErrContentDoesNotExist
+ }
+ return false, err
+ }
+ if !comment.Type.HasContentSupport() {
+ // this is not a comment with text and/or attachments
+ log.Warn("User #%d wanted to report comment #%d but it is not a comment with content.", doer.ID, contentID)
+ return false, nil
+ }
+ issueID = comment.IssueID
+ case moderation.ReportedContentTypeIssue:
+ issueID = contentID
+ case moderation.ReportedContentTypeRepository:
+ repoID = contentID
+ }
+
+ if issueID > 0 {
+ issue, err := issues.GetIssueByID(ctx, issueID)
+ if err != nil {
+ if issues.IsErrIssueNotExist(err) {
+ log.Warn("User #%d wanted to report issue #%d (or one of its comments) but it does not exist.", doer.ID, issueID)
+ return false, ErrContentDoesNotExist
+ }
+ return false, err
+ }
+
+ repoID = issue.RepoID
+ if issue.IsPull {
+ unitType = unit.TypePullRequests
+ } else {
+ unitType = unit.TypeIssues
+ }
+ }
+
+ if repoID > 0 {
+ repo, err := repo_model.GetRepositoryByID(ctx, repoID)
+ if err != nil {
+ if repo_model.IsErrRepoNotExist(err) {
+ log.Warn("User #%d wanted to report repository #%d (or one of its issues / comments) but it does not exist.", doer.ID, repoID)
+ return false, ErrContentDoesNotExist
+ }
+ return false, err
+ }
+
+ if issueID > 0 {
+ // for comments and issues/pulls doer should have at least read access to the corresponding repo unit (issues, respectively pull requests)
+ hasAccess, err = access_model.HasAccessUnit(ctx, doer, repo, unitType, perm.AccessModeRead)
+ if err != nil {
+ return false, err
+ } else if !hasAccess {
+ log.Warn("User #%d wanted to report issue #%d or one of its comments from repository #%d but they don't have access to it.", doer.ID, issueID, repoID)
+ return false, ErrDoerNotAllowed
+ }
+ } else {
+ // for repositories doer should have at least read access to at least one repo unit
+ perm, err := access_model.GetUserRepoPermission(ctx, repo, doer)
+ if err != nil {
+ return false, err
+ }
+ hasAccess = perm.CanReadAny(unit.AllRepoUnitTypes...)
+ if !hasAccess {
+ log.Warn("User #%d wanted to report repository #%d but they don't have access to it.", doer.ID, repoID)
+ return false, ErrDoerNotAllowed
+ }
+ }
+ }
+ }
+
+ return hasAccess, nil
+}
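For orientation, a minimal sketch of how a caller is expected to use this check: verify access with CanReport before persisting anything. The handler and the saveReport helper below are hypothetical and only illustrate the flow; CanReport and its error values come from the code above, and the imports are assumed to match reporting.go.

// hypothetical caller, for illustration only
func handleReportSubmission(ctx context.Context, doer *user.User, contentType moderation.ReportedContentType, contentID int64) error {
	ok, err := CanReport(ctx, doer, contentType, contentID)
	if err != nil {
		// ErrContentDoesNotExist and ErrDoerNotAllowed are surfaced here as well
		return err
	}
	if !ok {
		// e.g. a comment type without content support: not reportable, but not an error either
		return nil
	}
	return saveReport(ctx, doer.ID, contentType, contentID) // hypothetical persistence helper
}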
diff --git a/services/notify/notifier.go b/services/notify/notifier.go
index 00f98942d9..4d88a7ab95 100644
--- a/services/notify/notifier.go
+++ b/services/notify/notifier.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "forgejo.org/models/actions"
issues_model "forgejo.org/models/issues"
packages_model "forgejo.org/models/packages"
repo_model "forgejo.org/models/repo"
@@ -76,4 +77,6 @@ type Notifier interface {
PackageDelete(ctx context.Context, doer *user_model.User, pd *packages_model.PackageDescriptor)
ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository)
+
+ ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun)
}
diff --git a/services/notify/notify.go b/services/notify/notify.go
index fb30dfb609..02c18272cb 100644
--- a/services/notify/notify.go
+++ b/services/notify/notify.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "forgejo.org/models/actions"
issues_model "forgejo.org/models/issues"
packages_model "forgejo.org/models/packages"
repo_model "forgejo.org/models/repo"
@@ -374,3 +375,15 @@ func ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
notifier.ChangeDefaultBranch(ctx, repo)
}
}
+
+// ActionRunNowDone notifies that an ActionRun has just finished: its status changed from priorStatus
+// (for which priorStatus.isDone() == false) to run.Status (for which run.Status.isDone() == true).
+// run represents the new state of the ActionRun.
+// lastRun is the last run of the same workflow in the same repo that finished before run; it might be
+// nil (e.g. when run is the first run of this workflow). It can be used to figure out whether a
+// successful run follows a failed one.
+// Both run and lastRun need their attributes loaded.
+func ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) {
+ for _, notifier := range notifiers {
+ notifier.ActionRunNowDone(ctx, run, priorStatus, lastRun)
+ }
+}
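As a rough sketch of how a notifier is expected to consume this event: embed NullNotifier and override ActionRunNowDone, using priorStatus and lastRun to decide whether the transition is interesting (for example, a workflow recovering after a failed run). This assumes the IsSuccess and IsFailure helpers on actions_model.Status; the mailWorkflowRecovered helper is hypothetical.

// hypothetical notifier, for illustration only
type workflowStatusNotifier struct {
	NullNotifier
}

func (n *workflowStatusNotifier) ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) {
	// Only act when a run has just succeeded while the previous run of the same workflow failed.
	if run.Status.IsSuccess() && lastRun != nil && lastRun.Status.IsFailure() {
		mailWorkflowRecovered(ctx, run) // hypothetical
	}
}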
diff --git a/services/notify/null.go b/services/notify/null.go
index 7182e69abb..9c76e5cbd3 100644
--- a/services/notify/null.go
+++ b/services/notify/null.go
@@ -6,6 +6,7 @@ package notify
import (
"context"
+ actions_model "forgejo.org/models/actions"
issues_model "forgejo.org/models/issues"
packages_model "forgejo.org/models/packages"
repo_model "forgejo.org/models/repo"
@@ -211,3 +212,7 @@ func (*NullNotifier) PackageDelete(ctx context.Context, doer *user_model.User, p
// ChangeDefaultBranch places a place holder function
func (*NullNotifier) ChangeDefaultBranch(ctx context.Context, repo *repo_model.Repository) {
}
+
+// ActionRunNowDone places a placeholder function
+func (*NullNotifier) ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) {
+}
diff --git a/services/packages/alpine/repository.go b/services/packages/alpine/repository.go
index 9435887a46..dd66c7d74e 100644
--- a/services/packages/alpine/repository.go
+++ b/services/packages/alpine/repository.go
@@ -258,7 +258,7 @@ func buildPackagesIndex(ctx context.Context, ownerID int64, repoVersion *package
privPem, _ := pem.Decode([]byte(priv))
if privPem == nil {
- return fmt.Errorf("failed to decode private key pem")
+ return errors.New("failed to decode private key pem")
}
privKey, err := x509.ParsePKCS1PrivateKey(privPem.Bytes)
diff --git a/services/packages/auth.go b/services/packages/auth.go
index ab2c347bc9..205125cf8b 100644
--- a/services/packages/auth.go
+++ b/services/packages/auth.go
@@ -4,6 +4,7 @@
package packages
import (
+ "errors"
"fmt"
"net/http"
"strings"
@@ -53,7 +54,7 @@ func ParseAuthorizationToken(req *http.Request) (int64, auth_model.AccessTokenSc
parts := strings.SplitN(h, " ", 2)
if len(parts) != 2 {
log.Error("split token failed: %s", h)
- return 0, "", fmt.Errorf("split token failed")
+ return 0, "", errors.New("split token failed")
}
token, err := jwt.ParseWithClaims(parts[1], &packageClaims{}, func(t *jwt.Token) (any, error) {
@@ -68,7 +69,7 @@ func ParseAuthorizationToken(req *http.Request) (int64, auth_model.AccessTokenSc
c, ok := token.Claims.(*packageClaims)
if !token.Valid || !ok {
- return 0, "", fmt.Errorf("invalid token claim")
+ return 0, "", errors.New("invalid token claim")
}
return c.UserID, c.Scope, nil
diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go
index 7e821e2630..f8b08a0b59 100644
--- a/services/packages/cleanup/cleanup.go
+++ b/services/packages/cleanup/cleanup.go
@@ -122,23 +122,24 @@ func ExecuteCleanupRules(outerCtx context.Context) error {
}
if anyVersionDeleted {
- if pcr.Type == packages_model.TypeDebian {
+ switch pcr.Type {
+ case packages_model.TypeDebian:
if err := debian_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
return fmt.Errorf("CleanupRule [%d]: debian.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
}
- } else if pcr.Type == packages_model.TypeAlpine {
+ case packages_model.TypeAlpine:
if err := alpine_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
return fmt.Errorf("CleanupRule [%d]: alpine.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
}
- } else if pcr.Type == packages_model.TypeRpm {
+ case packages_model.TypeRpm:
if err := rpm_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
return fmt.Errorf("CleanupRule [%d]: rpm.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
}
- } else if pcr.Type == packages_model.TypeArch {
+ case packages_model.TypeArch:
if err := arch_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
return fmt.Errorf("CleanupRule [%d]: arch.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
}
- } else if pcr.Type == packages_model.TypeAlt {
+ case packages_model.TypeAlt:
if err := alt_service.BuildAllRepositoryFiles(ctx, pcr.OwnerID); err != nil {
return fmt.Errorf("CleanupRule [%d]: alt.BuildAllRepositoryFiles failed: %w", pcr.ID, err)
}
diff --git a/services/packages/cleanup/cleanup_sha256_test.go b/services/packages/cleanup/cleanup_sha256_test.go
index f26b98b4b0..efa254fc68 100644
--- a/services/packages/cleanup/cleanup_sha256_test.go
+++ b/services/packages/cleanup/cleanup_sha256_test.go
@@ -78,7 +78,7 @@ func TestCleanupSHA256(t *testing.T) {
for range expected {
filtered = append(filtered, true)
}
- assert.EqualValues(t, filtered, logFiltered, expected)
+ assert.Equal(t, filtered, logFiltered, expected)
}
ancient := 1 * time.Hour
diff --git a/services/packages/container/blob_uploader.go b/services/packages/container/blob_uploader.go
index cc009d1f5c..ffc47f3853 100644
--- a/services/packages/container/blob_uploader.go
+++ b/services/packages/container/blob_uploader.go
@@ -92,7 +92,7 @@ func (u *BlobUploader) Append(ctx context.Context, r io.Reader) error {
u.BytesReceived += n
- u.HashStateBytes, err = u.MultiHasher.MarshalBinary()
+ u.HashStateBytes, err = u.MarshalBinary()
if err != nil {
return err
}
diff --git a/services/packages/packages.go b/services/packages/packages.go
index 1232e5914f..418ceab798 100644
--- a/services/packages/packages.go
+++ b/services/packages/packages.go
@@ -127,12 +127,12 @@ func createPackageAndVersion(ctx context.Context, pvci *PackageCreationInfo, all
OwnerID: pvci.Owner.ID,
Type: pvci.PackageType,
Name: pvci.Name,
- LowerName: strings.ToLower(pvci.Name),
+ LowerName: packages_model.ResolvePackageName(pvci.Name, pvci.PackageType),
SemverCompatible: pvci.SemverCompatible,
}
var err error
if p, err = packages_model.TryInsertPackage(ctx, p); err != nil {
- if err == packages_model.ErrDuplicatePackage {
+ if errors.Is(err, packages_model.ErrDuplicatePackage) {
packageCreated = false
} else {
log.Error("Error inserting package: %v", err)
@@ -208,7 +208,7 @@ func AddFileToExistingPackage(ctx context.Context, pvi *PackageInfo, pfci *Packa
// This method skips quota checks and should only be used for system-managed packages.
func AddFileToPackageVersionInternal(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, error) {
return addFileToPackageWrapper(ctx, func(ctx context.Context) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
- return addFileToPackageVersionUnchecked(ctx, pv, pfci)
+ return addFileToPackageVersionUnchecked(ctx, pv, pfci, "")
})
}
@@ -261,10 +261,10 @@ func addFileToPackageVersion(ctx context.Context, pv *packages_model.PackageVers
return nil, nil, false, err
}
- return addFileToPackageVersionUnchecked(ctx, pv, pfci)
+ return addFileToPackageVersionUnchecked(ctx, pv, pfci, pvi.PackageType)
}
-func addFileToPackageVersionUnchecked(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
+func addFileToPackageVersionUnchecked(ctx context.Context, pv *packages_model.PackageVersion, pfci *PackageFileCreationInfo, packageType packages_model.Type) (*packages_model.PackageFile, *packages_model.PackageBlob, bool, error) {
log.Trace("Adding package file: %v, %s", pv.ID, pfci.Filename)
pb, exists, err := packages_model.GetOrInsertBlob(ctx, NewPackageBlob(pfci.Data))
@@ -304,7 +304,7 @@ func addFileToPackageVersionUnchecked(ctx context.Context, pv *packages_model.Pa
VersionID: pv.ID,
BlobID: pb.ID,
Name: pfci.Filename,
- LowerName: strings.ToLower(pfci.Filename),
+ LowerName: packages_model.ResolvePackageName(pfci.Filename, packageType),
CompositeKey: pfci.CompositeKey,
IsLead: pfci.IsLead,
}
diff --git a/services/packages/rpm/repository.go b/services/packages/rpm/repository.go
index 961de7828f..26f34be2bc 100644
--- a/services/packages/rpm/repository.go
+++ b/services/packages/rpm/repository.go
@@ -26,10 +26,10 @@ import (
"forgejo.org/modules/util"
packages_service "forgejo.org/services/packages"
+ "code.forgejo.org/forgejo/go-rpmutils"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/armor"
"github.com/ProtonMail/go-crypto/openpgp/packet"
- "github.com/sassoftware/go-rpmutils"
)
// GetOrCreateRepositoryVersion gets or creates the internal repository package
@@ -410,7 +410,6 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs []
files = append(files, f)
}
}
- packageVersion := fmt.Sprintf("%s-%s", pd.FileMetadata.Version, pd.FileMetadata.Release)
packages = append(packages, &Package{
Type: "rpm",
Name: pd.Package.Name,
@@ -439,7 +438,7 @@ func buildPrimary(ctx context.Context, pv *packages_model.PackageVersion, pfs []
Archive: pd.FileMetadata.ArchiveSize,
},
Location: Location{
- Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, packageVersion, pd.FileMetadata.Architecture, pd.Package.Name, packageVersion, pd.FileMetadata.Architecture),
+ Href: fmt.Sprintf("package/%s/%s/%s/%s-%s.%s.rpm", pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture, pd.Package.Name, pd.Version.Version, pd.FileMetadata.Architecture),
},
Format: Format{
License: pd.VersionMetadata.License,
diff --git a/services/pull/check.go b/services/pull/check.go
index d038b3d829..6002e2ae26 100644
--- a/services/pull/check.go
+++ b/services/pull/check.go
@@ -28,6 +28,7 @@ import (
"forgejo.org/modules/timeutil"
asymkey_service "forgejo.org/services/asymkey"
notify_service "forgejo.org/services/notify"
+ shared_automerge "forgejo.org/services/shared/automerge"
)
// prPatchCheckerQueue represents a queue to handle update pull request tests
@@ -170,7 +171,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer
// checkAndUpdateStatus checks if pull request is possible to leaving checking status,
// and set to be either conflict or mergeable.
-func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) {
+func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) bool {
// If status has not been changed to conflict by testPatch then we are mergeable
if pr.Status == issues_model.PullRequestStatusChecking {
pr.Status = issues_model.PullRequestStatusMergeable
@@ -184,12 +185,15 @@ func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) {
if has {
log.Trace("Not updating status for %-v as it is due to be rechecked", pr)
- return
+ return false
}
if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil {
log.Error("Update[%-v]: %v", pr, err)
+ return false
}
+
+ return true
}
// getMergeCommit checks if a pull request has been merged
@@ -339,15 +343,22 @@ func handler(items ...string) []string {
}
func testPR(id int64) {
- pullWorkingPool.CheckIn(fmt.Sprint(id))
- defer pullWorkingPool.CheckOut(fmt.Sprint(id))
ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("Test PR[%d] from patch checking queue", id))
defer finished()
+ if pr, updated := testPRProtected(ctx, id); pr != nil && updated {
+ shared_automerge.AddToQueueIfMergeable(ctx, pr)
+ }
+}
+
+func testPRProtected(ctx context.Context, id int64) (*issues_model.PullRequest, bool) {
+ pullWorkingPool.CheckIn(fmt.Sprint(id))
+ defer pullWorkingPool.CheckOut(fmt.Sprint(id))
+
pr, err := issues_model.GetPullRequestByID(ctx, id)
if err != nil {
log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err)
- return
+ return nil, false
}
log.Trace("Testing %-v", pr)
@@ -357,12 +368,12 @@ func testPR(id int64) {
if pr.HasMerged {
log.Trace("%-v is already merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
- return
+ return nil, false
}
if manuallyMerged(ctx, pr) {
log.Trace("%-v is manually merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID)
- return
+ return nil, false
}
if err := TestPatch(pr); err != nil {
@@ -371,9 +382,10 @@ func testPR(id int64) {
if err := pr.UpdateCols(ctx, "status"); err != nil {
log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err)
}
- return
+ return nil, false
}
- checkAndUpdateStatus(ctx, pr)
+
+ return pr, checkAndUpdateStatus(ctx, pr)
}
// CheckPRsForBaseBranch check all pulls with baseBrannch
@@ -395,7 +407,7 @@ func Init() error {
prPatchCheckerQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "pr_patch_checker", handler)
if prPatchCheckerQueue == nil {
- return fmt.Errorf("unable to create pr_patch_checker queue")
+ return errors.New("unable to create pr_patch_checker queue")
}
go graceful.GetManager().RunWithCancel(prPatchCheckerQueue)
diff --git a/services/pull/check_test.go b/services/pull/check_test.go
index b965d90236..9b7e1660bc 100644
--- a/services/pull/check_test.go
+++ b/services/pull/check_test.go
@@ -52,7 +52,7 @@ func TestPullRequest_AddToTaskQueue(t *testing.T) {
select {
case id := <-idChan:
- assert.EqualValues(t, pr.ID, id)
+ assert.Equal(t, pr.ID, id)
case <-time.After(time.Second):
assert.FailNow(t, "Timeout: nothing was added to pullRequestQueue")
}
diff --git a/services/pull/merge.go b/services/pull/merge.go
index 9b0d632377..f69f8a87b4 100644
--- a/services/pull/merge.go
+++ b/services/pull/merge.go
@@ -6,6 +6,7 @@ package pull
import (
"context"
+ "errors"
"fmt"
"net/url"
"os"
@@ -13,6 +14,7 @@ import (
"regexp"
"strconv"
"strings"
+ "unicode"
"forgejo.org/models"
"forgejo.org/models/db"
@@ -168,6 +170,41 @@ func GetDefaultMergeMessage(ctx context.Context, baseGitRepo *git.Repository, pr
return getMergeMessage(ctx, baseGitRepo, pr, mergeStyle, nil)
}
+func AddCommitMessageTrailer(message, trailerKey, trailerValue string) string {
+ trailerLine := trailerKey + ": " + trailerValue
+ message = strings.ReplaceAll(message, "\r\n", "\n")
+ message = strings.ReplaceAll(message, "\r", "\n")
+ if strings.Contains(message, "\n"+trailerLine+"\n") || strings.HasSuffix(message, "\n"+trailerLine) {
+ return message
+ }
+
+ if !strings.HasSuffix(message, "\n") {
+ message += "\n"
+ }
+ lastNewLine := strings.LastIndexByte(message[:len(message)-1], '\n')
+ keyEnd := -1
+ if lastNewLine != -1 {
+ keyEnd = strings.IndexByte(message[lastNewLine:], ':')
+ if keyEnd != -1 {
+ keyEnd += lastNewLine
+ }
+ }
+ var lastLineKey string
+ if lastNewLine != -1 && keyEnd != -1 {
+ lastLineKey = message[lastNewLine+1 : keyEnd]
+ }
+
+ isLikelyTrailerLine := lastLineKey != "" && unicode.IsUpper(rune(lastLineKey[0])) && strings.Contains(lastLineKey, "-")
+ for i := 0; isLikelyTrailerLine && i < len(lastLineKey); i++ {
+ r := rune(lastLineKey[i])
+ isLikelyTrailerLine = unicode.IsLetter(r) || unicode.IsDigit(r) || r == '-'
+ }
+ if !strings.HasSuffix(message, "\n\n") && !isLikelyTrailerLine {
+ message += "\n"
+ }
+ return message + trailerLine
+}
+
// Merge merges pull request to base repository.
// Caller should check PR is ready to be merged (review and status checks)
func Merge(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, baseGitRepo *git.Repository, mergeStyle repo_model.MergeStyle, expectedHeadCommitID, message string, wasAutoMerged bool) error {
@@ -518,13 +555,13 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use
objectFormat := git.ObjectFormatFromName(pr.BaseRepo.ObjectFormatName)
if len(commitID) != objectFormat.FullLength() {
- return fmt.Errorf("Wrong commit ID")
+ return errors.New("Wrong commit ID")
}
commit, err := baseGitRepo.GetCommit(commitID)
if err != nil {
if git.IsErrNotExist(err) {
- return fmt.Errorf("Wrong commit ID")
+ return errors.New("Wrong commit ID")
}
return err
}
@@ -535,7 +572,7 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use
return err
}
if !ok {
- return fmt.Errorf("Wrong commit ID")
+ return errors.New("Wrong commit ID")
}
pr.MergedCommitID = commitID
@@ -548,7 +585,7 @@ func MergedManually(ctx context.Context, pr *issues_model.PullRequest, doer *use
if merged, err = pr.SetMerged(ctx); err != nil {
return err
} else if !merged {
- return fmt.Errorf("SetMerged failed")
+ return errors.New("SetMerged failed")
}
return nil
}); err != nil {
diff --git a/services/pull/merge_prepare.go b/services/pull/merge_prepare.go
index fb09515dbd..fc70da10a4 100644
--- a/services/pull/merge_prepare.go
+++ b/services/pull/merge_prepare.go
@@ -236,10 +236,72 @@ func getDiffTree(ctx context.Context, repoPath, baseBranch, headBranch string, o
// rebaseTrackingOnToBase checks out the tracking branch as staging and rebases it on to the base branch
// if there is a conflict it will return a models.ErrRebaseConflicts
func rebaseTrackingOnToBase(ctx *mergeContext, mergeStyle repo_model.MergeStyle) error {
- // Checkout head branch
- if err := git.NewCommand(ctx, "checkout", "-b").AddDynamicArguments(stagingBranch, trackingBranch).
+ // Create staging branch
+ if err := git.NewCommand(ctx, "branch").AddDynamicArguments(stagingBranch, trackingBranch).
Run(ctx.RunOpts()); err != nil {
- return fmt.Errorf("unable to git checkout tracking as staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
+ return fmt.Errorf(
+ "unable to git branch tracking as staging in temp repo for %v: %w\n%s\n%s",
+ ctx.pr, err,
+ ctx.outbuf.String(),
+ ctx.errbuf.String(),
+ )
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+
+ // Check git version for availability of git-replay. If it is available, we use
+ // it for performance and to preserve unknown commit headers like the
+ // "change-id" header used by Jujutsu and GitButler to track changes across
+ // rebase, amend etc.
+ if err := git.CheckGitVersionAtLeast("2.44"); err == nil {
+ // Use git-replay for performance and to preserve unknown headers,
+ // like the "change-id" header used by Jujutsu and GitButler.
+ if err := git.NewCommand(ctx, "replay", "--onto").AddDynamicArguments(baseBranch).
+ AddDynamicArguments(fmt.Sprintf("%s..%s", baseBranch, stagingBranch)).
+ Run(ctx.RunOpts()); err != nil {
+ // git-replay doesn't tell us which commit first created a merge conflict.
+ // In order to preserve the quality of our error messages, fall back to
+ // regular git-rebase.
+ goto regular_rebase
+ }
+ // git-replay worked, stdout contains the instructions for update-ref
+ updateRefInstructions := ctx.outbuf.String()
+ opts := ctx.RunOpts()
+ opts.Stdin = strings.NewReader(updateRefInstructions)
+ if err := git.NewCommand(ctx, "update-ref", "--stdin").Run(opts); err != nil {
+ return fmt.Errorf(
+ "Failed to update ref for %v: %w\n%s\n%s",
+ ctx.pr,
+ err,
+ ctx.outbuf.String(),
+ ctx.errbuf.String(),
+ )
+ }
+ // Checkout staging branch
+ if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(stagingBranch).
+ Run(ctx.RunOpts()); err != nil {
+ return fmt.Errorf(
+ "unable to git checkout staging in temp repo for %v: %w\n%s\n%s",
+ ctx.pr,
+ err,
+ ctx.outbuf.String(),
+ ctx.errbuf.String(),
+ )
+ }
+ ctx.outbuf.Reset()
+ ctx.errbuf.Reset()
+ return nil
+ }
+
+ // The available git version is too old to support git-replay, or git-replay
+ // failed and we want to determine the first commit that produced a
+ // merge-conflict. Fall back to regular rebase.
+regular_rebase:
+
+ // Checkout head branch
+ if err := git.NewCommand(ctx, "checkout").AddDynamicArguments(stagingBranch).
+ Run(ctx.RunOpts()); err != nil {
+ return fmt.Errorf("unable to git checkout staging in temp repo for %v: %w\n%s\n%s", ctx.pr, err, ctx.outbuf.String(), ctx.errbuf.String())
}
ctx.outbuf.Reset()
ctx.errbuf.Reset()
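For context on the data flowing between the two commands in the git-replay path above: git replay --onto prints one "update" instruction per rewritten ref, in exactly the format accepted by git update-ref --stdin, so the staging branch is moved onto the replayed commits without checking anything out. A minimal sketch of that hand-off, using the same git command wrapper as the patch; the object IDs are made up:

// illustration only: the instruction format produced by `git replay --onto <base> <base>..<staging>`
const exampleReplayOutput = "update refs/heads/staging 7f894307ffc9553edbd0b671cab829786866f7b2 988881adc9fc3655077dc2d4d757d480b5ea0e11\n"

func applyReplayInstructions(ctx context.Context, repoPath string) error {
	opts := &git.RunOpts{Dir: repoPath, Stdin: strings.NewReader(exampleReplayOutput)}
	// git update-ref --stdin applies each "update <ref> <new-oid> <old-oid>" line atomically.
	return git.NewCommand(ctx, "update-ref", "--stdin").Run(opts)
}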
diff --git a/services/pull/merge_squash.go b/services/pull/merge_squash.go
index 1c6f734a25..f655224c5e 100644
--- a/services/pull/merge_squash.go
+++ b/services/pull/merge_squash.go
@@ -5,7 +5,6 @@ package pull
import (
"fmt"
- "strings"
repo_model "forgejo.org/models/repo"
user_model "forgejo.org/models/user"
@@ -67,10 +66,8 @@ func doMergeStyleSquash(ctx *mergeContext, message string) error {
if setting.Repository.PullRequest.AddCoCommitterTrailers && ctx.committer.String() != sig.String() {
// add trailer
- if !strings.Contains(message, fmt.Sprintf("Co-authored-by: %s", sig.String())) {
- message += fmt.Sprintf("\nCo-authored-by: %s", sig.String())
- }
- message += fmt.Sprintf("\nCo-committed-by: %s\n", sig.String())
+ message = AddCommitMessageTrailer(message, "Co-authored-by", sig.String())
+ message = AddCommitMessageTrailer(message, "Co-committed-by", sig.String()) // FIXME: this one should be removed, it is not really used or widely used
}
cmdCommit := git.NewCommand(ctx, "commit").
AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email).
diff --git a/services/pull/merge_test.go b/services/pull/merge_test.go
index 6df6f55d46..2a26759956 100644
--- a/services/pull/merge_test.go
+++ b/services/pull/merge_test.go
@@ -65,3 +65,28 @@ func Test_expandDefaultMergeMessage(t *testing.T) {
})
}
}
+
+func TestAddCommitMessageTrailer(t *testing.T) {
+ // add trailer for empty message
+ assert.Equal(t, "\n\nTest-tailer: TestValue", AddCommitMessageTrailer("", "Test-tailer", "TestValue"))
+
+ // add trailer for message without newlines
+ assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title", "Test-tailer", "TestValue"))
+ assert.Equal(t, "title\n\nNot tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n\nNot tailer: xxx", "Test-tailer", "TestValue"))
+ assert.Equal(t, "title\n\nNotTailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n\nNotTailer: xxx", "Test-tailer", "TestValue"))
+ assert.Equal(t, "title\n\nnot-tailer: xxx\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n\nnot-tailer: xxx", "Test-tailer", "TestValue"))
+
+ // add trailer for message with one EOL
+ assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n", "Test-tailer", "TestValue"))
+
+ // add trailer for message with two EOLs
+ assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n\n", "Test-tailer", "TestValue"))
+
+ // add trailer for message with an existing trailer (won't duplicate)
+ assert.Equal(t, "title\n\nTest-tailer: TestValue", AddCommitMessageTrailer("title\n\nTest-tailer: TestValue", "Test-tailer", "TestValue"))
+ assert.Equal(t, "title\n\nTest-tailer: TestValue\n", AddCommitMessageTrailer("title\n\nTest-tailer: TestValue\n", "Test-tailer", "TestValue"))
+
+ // add trailer for message with an existing trailer and a different value (will append)
+ assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTrailer("title\n\nTest-tailer: v1", "Test-tailer", "v2"))
+ assert.Equal(t, "title\n\nTest-tailer: v1\nTest-tailer: v2", AddCommitMessageTrailer("title\n\nTest-tailer: v1\n", "Test-tailer", "v2"))
+}
diff --git a/services/pull/patch.go b/services/pull/patch.go
index 35d1b101e2..37a0f818e9 100644
--- a/services/pull/patch.go
+++ b/services/pull/patch.go
@@ -5,7 +5,7 @@
package pull
import (
- "bufio"
+ "bytes"
"context"
"fmt"
"io"
@@ -16,14 +16,11 @@ import (
"forgejo.org/models"
git_model "forgejo.org/models/git"
issues_model "forgejo.org/models/issues"
- "forgejo.org/models/unit"
- "forgejo.org/modules/container"
"forgejo.org/modules/git"
"forgejo.org/modules/gitrepo"
"forgejo.org/modules/graceful"
"forgejo.org/modules/log"
"forgejo.org/modules/process"
- "forgejo.org/modules/setting"
"forgejo.org/modules/util"
"github.com/gobwas/glob"
@@ -49,66 +46,159 @@ func DownloadDiffOrPatch(ctx context.Context, pr *issues_model.PullRequest, w io
return nil
}
-var patchErrorSuffices = []string{
- ": already exists in index",
- ": patch does not apply",
- ": already exists in working directory",
- "unrecognized input",
- ": No such file or directory",
- ": does not exist in index",
-}
-
// TestPatch will test whether a simple patch will apply
func TestPatch(pr *issues_model.PullRequest) error {
ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr))
defer finished()
- prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
- if err != nil {
- if !git_model.IsErrBranchNotExist(err) {
- log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
- }
- return err
- }
- defer cancel()
-
- return testPatch(ctx, prCtx, pr)
+ testPatchCtx, err := testPatch(ctx, pr)
+ testPatchCtx.close()
+ return err
}
-func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullRequest) error {
- gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
- if err != nil {
- return fmt.Errorf("OpenRepository: %w", err)
- }
- defer gitRepo.Close()
+type testPatchContext struct {
+ headRev string
+ headIsCommitID bool
+ baseRev string
+ env []string
+ gitRepo *git.Repository
+ close func()
+}
- // 1. update merge base
- pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base", "--", "base", "tracking").RunStdString(&git.RunOpts{Dir: prCtx.tmpBasePath})
- if err != nil {
- var err2 error
- pr.MergeBase, err2 = gitRepo.GetRefCommitID(git.BranchPrefix + "base")
- if err2 != nil {
- return fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2)
+// LoadHeadRevision loads the necessary information to access the head revision.
+func (t *testPatchContext) LoadHeadRevision(ctx context.Context, pr *issues_model.PullRequest) error {
+ // If AGit, then use HeadCommitID if set (AGit flow creates pull request),
+ // otherwise use the pull request reference.
+ if pr.Flow == issues_model.PullRequestFlowAGit {
+ if len(pr.HeadCommitID) > 0 {
+ t.headRev = pr.HeadCommitID
+ t.headIsCommitID = true
+ return nil
}
- }
- pr.MergeBase = strings.TrimSpace(pr.MergeBase)
- if pr.HeadCommitID, err = gitRepo.GetRefCommitID(git.BranchPrefix + "tracking"); err != nil {
- return fmt.Errorf("GetBranchCommitID: can't find commit ID for head: %w", err)
- }
-
- if pr.HeadCommitID == pr.MergeBase {
- pr.Status = issues_model.PullRequestStatusAncestor
+ t.headRev = pr.GetGitRefName()
return nil
}
- // 2. Check for conflicts
- if conflicts, err := checkConflicts(ctx, pr, gitRepo, prCtx.tmpBasePath); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ // If it is within the same repository, simply return the branch name.
+ if pr.BaseRepoID == pr.HeadRepoID {
+ t.headRev = pr.GetGitHeadBranchRefName()
+ return nil
+ }
+
+ // We are in Github flow, head and base repository are different.
+ // Resolve the head branch to a commitID and return a Git alternate
+ // environment for the head repository.
+ gitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath())
+ if err != nil {
+ return err
+ }
+ defer gitRepo.Close()
+
+ headCommitID, err := gitRepo.GetRefCommitID(pr.GetGitHeadBranchRefName())
+ if err != nil {
return err
}
+ t.headRev = headCommitID
+ t.headIsCommitID = true
+ t.env = append(os.Environ(), `GIT_ALTERNATE_OBJECT_DIRECTORIES=`+pr.HeadRepo.RepoPath()+"/objects")
+ return nil
+}
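A note on the alternates trick used just above: pointing GIT_ALTERNATE_OBJECT_DIRECTORIES at the head repository's objects directory lets git commands that run in the base repository read objects which so far exist only in the head repository. A minimal sketch, assuming baseRepoPath, headRepoPath and headCommitID are already known:

// illustration only: verify from the base repo that the head commit is readable
env := append(os.Environ(), "GIT_ALTERNATE_OBJECT_DIRECTORIES="+headRepoPath+"/objects")
err := git.NewCommand(ctx, "cat-file", "-e").AddDynamicArguments(headCommitID).
	Run(&git.RunOpts{Dir: baseRepoPath, Env: env})

This is what allows the merge-base and merge-tree invocations further down to compare branches from two different repositories without building a temporary repository.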
+
+// getTestPatchCtx constructs a new test patch context for the given pull request.
+func getTestPatchCtx(ctx context.Context, pr *issues_model.PullRequest) (*testPatchContext, error) {
+ testPatchCtx := &testPatchContext{
+ close: func() {},
+ }
+
+ if git.SupportGitMergeTree {
+ if err := pr.LoadBaseRepo(ctx); err != nil {
+ return testPatchCtx, fmt.Errorf("LoadBaseRepo: %w", err)
+ }
+ if err := pr.LoadHeadRepo(ctx); err != nil {
+ return testPatchCtx, fmt.Errorf("LoadHeadRepo: %w", err)
+ }
+
+ if err := testPatchCtx.LoadHeadRevision(ctx, pr); err != nil {
+ return testPatchCtx, fmt.Errorf("LoadHeadRevision: %w", err)
+ }
+
+ gitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath())
+ if err != nil {
+ return testPatchCtx, fmt.Errorf("OpenRepository: %w", err)
+ }
+
+ testPatchCtx.baseRev = git.BranchPrefix + pr.BaseBranch
+ testPatchCtx.gitRepo = gitRepo
+ testPatchCtx.close = func() {
+ gitRepo.Close()
+ }
+ } else {
+ prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ if err != nil {
+ return testPatchCtx, fmt.Errorf("createTemporaryRepoForPR: %w", err)
+ }
+ testPatchCtx.close = cancel
+
+ gitRepo, err := git.OpenRepository(ctx, prCtx.tmpBasePath)
+ if err != nil {
+ return testPatchCtx, fmt.Errorf("OpenRepository: %w", err)
+ }
+
+ testPatchCtx.baseRev = git.BranchPrefix + baseBranch
+ testPatchCtx.headRev = git.BranchPrefix + trackingBranch
+ testPatchCtx.gitRepo = gitRepo
+ testPatchCtx.close = func() {
+ cancel()
+ gitRepo.Close()
+ }
+ }
+ return testPatchCtx, nil
+}
+
+func testPatch(ctx context.Context, pr *issues_model.PullRequest) (*testPatchContext, error) {
+ testPatchCtx, err := getTestPatchCtx(ctx, pr)
+ if err != nil {
+ return testPatchCtx, fmt.Errorf("getTestPatchCtx: %w", err)
+ }
+
+ // 1. update merge base
+ pr.MergeBase, _, err = git.NewCommand(ctx, "merge-base").AddDashesAndList(testPatchCtx.baseRev, testPatchCtx.headRev).RunStdString(&git.RunOpts{Dir: testPatchCtx.gitRepo.Path, Env: testPatchCtx.env})
+ if err != nil {
+ var err2 error
+ pr.MergeBase, err2 = testPatchCtx.gitRepo.GetRefCommitID(testPatchCtx.baseRev)
+ if err2 != nil {
+ return testPatchCtx, fmt.Errorf("GetMergeBase: %v and can't find commit ID for base: %w", err, err2)
+ }
+ }
+ pr.MergeBase = strings.TrimSpace(pr.MergeBase)
+
+ if testPatchCtx.headIsCommitID {
+ pr.HeadCommitID = testPatchCtx.headRev
+ } else {
+ if pr.HeadCommitID, err = testPatchCtx.gitRepo.GetRefCommitID(testPatchCtx.headRev); err != nil {
+ return testPatchCtx, fmt.Errorf("GetRefCommitID: can't find commit ID for head: %w", err)
+ }
+ }
+
+ // If the head commit is equal to the merge base, the head commit is an
+ // ancestor of the base commit and there is nothing to merge.
+ if pr.HeadCommitID == pr.MergeBase {
+ pr.Status = issues_model.PullRequestStatusAncestor
+ return testPatchCtx, nil
+ }
+
+ // 2. Check for conflicts
+ if conflicts, err := checkConflicts(ctx, pr, testPatchCtx); err != nil || conflicts || pr.Status == issues_model.PullRequestStatusEmpty {
+ if err != nil {
+ return testPatchCtx, fmt.Errorf("checkConflicts: %w", err)
+ }
+ return testPatchCtx, nil
+ }
+
// 3. Check for protected files changes
- if err = checkPullFilesProtection(ctx, pr, gitRepo); err != nil {
- return fmt.Errorf("pr.CheckPullFilesProtection(): %v", err)
+ if err = checkPullFilesProtection(ctx, pr, testPatchCtx); err != nil {
+ return testPatchCtx, fmt.Errorf("checkPullFilesProtection: %v", err)
}
if len(pr.ChangedProtectedFiles) > 0 {
@@ -117,7 +207,7 @@ func testPatch(ctx context.Context, prCtx *prContext, pr *issues_model.PullReque
pr.Status = issues_model.PullRequestStatusMergeable
- return nil
+ return testPatchCtx, nil
}
type errMergeConflict struct {
@@ -236,12 +326,12 @@ func attemptMerge(ctx context.Context, file *unmergedFile, tmpBasePath string, f
}
// AttemptThreeWayMerge will attempt to three way merge using git read-tree and then follow the git merge-one-file algorithm to attempt to resolve basic conflicts
-func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repository, base, ours, theirs, description string) (bool, []string, error) {
+func AttemptThreeWayMerge(ctx context.Context, gitRepo *git.Repository, base, ours, theirs, description string) (bool, []string, error) {
ctx, cancel := context.WithCancel(ctx)
defer cancel()
// First we use read-tree to do a simple three-way merge
- if _, _, err := git.NewCommand(ctx, "read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitPath}); err != nil {
+ if _, _, err := git.NewCommand(ctx, "read-tree", "-m").AddDynamicArguments(base, ours, theirs).RunStdString(&git.RunOpts{Dir: gitRepo.Path}); err != nil {
log.Error("Unable to run read-tree -m! Error: %v", err)
return false, nil, fmt.Errorf("unable to run read-tree -m! Error: %w", err)
}
@@ -251,7 +341,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
// Then we use git ls-files -u to list the unmerged files and collate the triples in unmergedfiles
unmerged := make(chan *unmergedFile)
- go unmergedFiles(ctx, gitPath, unmerged)
+ go unmergedFiles(ctx, gitRepo.Path, unmerged)
defer func() {
cancel()
@@ -274,7 +364,7 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
}
// OK now we have the unmerged file triplet attempt to merge it
- if err := attemptMerge(ctx, file, gitPath, &filesToRemove, &filesToAdd); err != nil {
+ if err := attemptMerge(ctx, file, gitRepo.Path, &filesToRemove, &filesToAdd); err != nil {
if conflictErr, ok := err.(*errMergeConflict); ok {
log.Trace("Conflict: %s in %s", conflictErr.filename, description)
conflict = true
@@ -299,14 +389,82 @@ func AttemptThreeWayMerge(ctx context.Context, gitPath string, gitRepo *git.Repo
return conflict, conflictedFiles, nil
}
-func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository, tmpBasePath string) (bool, error) {
- // 1. checkConflicts resets the conflict status - therefore - reset the conflict status
+// MergeTree runs a 3-way merge between `ours` and `theirs` with
+// `base` as the merge base.
+//
+// It uses git-merge-tree(1) to do this merge without requiring a work-tree and
+// can run in a base repository. It returns the object ID of the merge tree,
+// whether any conflicts were found, and the list of conflicted files.
+func MergeTree(ctx context.Context, gitRepo *git.Repository, base, ours, theirs string, env []string) (string, bool, []string, error) {
+ cmd := git.NewCommand(ctx, "merge-tree", "--write-tree", "-z", "--name-only", "--no-messages")
+ if git.CheckGitVersionAtLeast("2.40") == nil {
+ cmd.AddOptionFormat("--merge-base=%s", base)
+ }
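+ // The assembled command is roughly:
+ //   git merge-tree --write-tree -z --name-only --no-messages [--merge-base=<base>] <ours> <theirs>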
+
+ stdout := &bytes.Buffer{}
+ gitErr := cmd.AddDynamicArguments(ours, theirs).Run(&git.RunOpts{Dir: gitRepo.Path, Stdout: stdout, Env: env})
+ if gitErr != nil && !git.IsErrorExitCode(gitErr, 1) {
+ log.Error("Unable to run merge-tree: %v", gitErr)
+ return "", false, nil, fmt.Errorf("unable to run merge-tree: %w", gitErr)
+ }
+
+ // There are two situations that we consider for the output:
+ // 1. Clean merge and the output is <OID of toplevel tree>NUL
+ // 2. Merge conflict and the output is <OID of toplevel tree>NUL<Conflicted file info>NUL
+ treeOID, conflictedFileInfo, _ := strings.Cut(stdout.String(), "\x00")
+ if len(conflictedFileInfo) == 0 {
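+ // merge-tree exits with status 1 when the merge is not clean, so the exit
+ // code alone decides the conflict flag when no per-file details were emitted.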
+ return treeOID, git.IsErrorExitCode(gitErr, 1), nil, nil
+ }
+
+ // Remove the trailing NUL byte from the conflicted file info, then split on the NUL byte as separator.
+ return treeOID, true, strings.Split(conflictedFileInfo[:len(conflictedFileInfo)-1], "\x00"), nil
+}
+
+// checkConflicts takes a pull request and checks whether merging it would result
+// in merge conflicts and whether the diff is empty; the status is set accordingly.
+func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, testPatchCtx *testPatchContext) (bool, error) {
+ // Resets the conflict status.
pr.ConflictedFiles = nil
+ if git.SupportGitMergeTree {
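+ // Prefer merge-tree when the installed git supports it; the read-tree based
+ // three-way merge below remains as the fallback for older git versions.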
+ // Check for conflicts via a merge-tree.
+ treeHash, conflict, conflictFiles, err := MergeTree(ctx, testPatchCtx.gitRepo, pr.MergeBase, testPatchCtx.baseRev, testPatchCtx.headRev, testPatchCtx.env)
+ if err != nil {
+ return false, fmt.Errorf("MergeTree: %w", err)
+ }
+
+ if !conflict {
+ // No conflicts were detected, now check if the pull request actually
+ // contains anything useful via a diff. git-diff-tree(1) with --quiet
+ // will return exit code 0 if there's no diff and exit code 1 if there's
+ // a diff.
+ err := git.NewCommand(ctx, "diff-tree", "--quiet").AddDynamicArguments(treeHash, pr.MergeBase).Run(&git.RunOpts{Dir: testPatchCtx.gitRepo.Path, Env: testPatchCtx.env})
+ isEmpty := true
+ if err != nil {
+ if git.IsErrorExitCode(err, 1) {
+ isEmpty = false
+ } else {
+ return false, fmt.Errorf("DiffTree: %w", err)
+ }
+ }
+
+ if isEmpty {
+ log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
+ pr.Status = issues_model.PullRequestStatusEmpty
+ }
+ return false, nil
+ }
+
+ pr.Status = issues_model.PullRequestStatusConflict
+ pr.ConflictedFiles = conflictFiles
+
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+ return true, nil
+ }
+
// 2. AttemptThreeWayMerge first - this is much quicker than plain patch to base
description := fmt.Sprintf("PR[%d] %s/%s#%d", pr.ID, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, pr.Index)
- conflict, conflictFiles, err := AttemptThreeWayMerge(ctx,
- tmpBasePath, gitRepo, pr.MergeBase, "base", "tracking", description)
+ conflict, conflictFiles, err := AttemptThreeWayMerge(ctx, testPatchCtx.gitRepo, pr.MergeBase, testPatchCtx.baseRev, testPatchCtx.headRev, description)
if err != nil {
return false, err
}
@@ -315,13 +473,13 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
// No conflicts detected so we need to check if the patch is empty...
// a. Write the newly merged tree and check the new tree-hash
var treeHash string
- treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ treeHash, _, err = git.NewCommand(ctx, "write-tree").RunStdString(&git.RunOpts{Dir: testPatchCtx.gitRepo.Path})
if err != nil {
- lsfiles, _, _ := git.NewCommand(ctx, "ls-files", "-u").RunStdString(&git.RunOpts{Dir: tmpBasePath})
+ lsfiles, _, _ := git.NewCommand(ctx, "ls-files", "-u").RunStdString(&git.RunOpts{Dir: testPatchCtx.gitRepo.Path})
return false, fmt.Errorf("unable to write unconflicted tree: %w\n`git ls-files -u`:\n%s", err, lsfiles)
}
treeHash = strings.TrimSpace(treeHash)
- baseTree, err := gitRepo.GetTree("base")
+ baseTree, err := testPatchCtx.gitRepo.GetTree(testPatchCtx.baseRev)
if err != nil {
return false, err
}
@@ -335,171 +493,11 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo *
return false, nil
}
- // 3. OK the three-way merge method has detected conflicts
- // 3a. Are still testing with GitApply? If not set the conflict status and move on
- if !setting.Repository.PullRequest.TestConflictingPatchesWithGitApply {
- pr.Status = issues_model.PullRequestStatusConflict
- pr.ConflictedFiles = conflictFiles
+ pr.Status = issues_model.PullRequestStatusConflict
+ pr.ConflictedFiles = conflictFiles
- log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
- return true, nil
- }
-
- // 3b. Create a plain patch from head to base
- tmpPatchFile, err := os.CreateTemp("", "patch")
- if err != nil {
- log.Error("Unable to create temporary patch file! Error: %v", err)
- return false, fmt.Errorf("unable to create temporary patch file! Error: %w", err)
- }
- defer func() {
- _ = util.Remove(tmpPatchFile.Name())
- }()
-
- if err := gitRepo.GetDiffBinary(pr.MergeBase, "tracking", tmpPatchFile); err != nil {
- tmpPatchFile.Close()
- log.Error("Unable to get patch file from %s to %s in %s Error: %v", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
- return false, fmt.Errorf("unable to get patch file from %s to %s in %s Error: %w", pr.MergeBase, pr.HeadBranch, pr.BaseRepo.FullName(), err)
- }
- stat, err := tmpPatchFile.Stat()
- if err != nil {
- tmpPatchFile.Close()
- return false, fmt.Errorf("unable to stat patch file: %w", err)
- }
- patchPath := tmpPatchFile.Name()
- tmpPatchFile.Close()
-
- // 3c. if the size of that patch is 0 - there can be no conflicts!
- if stat.Size() == 0 {
- log.Debug("PullRequest[%d]: Patch is empty - ignoring", pr.ID)
- pr.Status = issues_model.PullRequestStatusEmpty
- return false, nil
- }
-
- log.Trace("PullRequest[%d].testPatch (patchPath): %s", pr.ID, patchPath)
-
- // 4. Read the base branch in to the index of the temporary repository
- _, _, err = git.NewCommand(gitRepo.Ctx, "read-tree", "base").RunStdString(&git.RunOpts{Dir: tmpBasePath})
- if err != nil {
- return false, fmt.Errorf("git read-tree %s: %w", pr.BaseBranch, err)
- }
-
- // 5. Now get the pull request configuration to check if we need to ignore whitespace
- prUnit, err := pr.BaseRepo.GetUnit(ctx, unit.TypePullRequests)
- if err != nil {
- return false, err
- }
- prConfig := prUnit.PullRequestsConfig()
-
- // 6. Prepare the arguments to apply the patch against the index
- cmdApply := git.NewCommand(gitRepo.Ctx, "apply", "--check", "--cached")
- if prConfig.IgnoreWhitespaceConflicts {
- cmdApply.AddArguments("--ignore-whitespace")
- }
- is3way := false
- if git.CheckGitVersionAtLeast("2.32.0") == nil {
- cmdApply.AddArguments("--3way")
- is3way = true
- }
- cmdApply.AddDynamicArguments(patchPath)
-
- // 7. Prep the pipe:
- // - Here we could do the equivalent of:
- // `git apply --check --cached patch_file > conflicts`
- // Then iterate through the conflicts. However, that means storing all the conflicts
- // in memory - which is very wasteful.
- // - alternatively we can do the equivalent of:
- // `git apply --check ... | grep ...`
- // meaning we don't store all of the conflicts unnecessarily.
- stderrReader, stderrWriter, err := os.Pipe()
- if err != nil {
- log.Error("Unable to open stderr pipe: %v", err)
- return false, fmt.Errorf("unable to open stderr pipe: %w", err)
- }
- defer func() {
- _ = stderrReader.Close()
- _ = stderrWriter.Close()
- }()
-
- // 8. Run the check command
- conflict = false
- err = cmdApply.Run(&git.RunOpts{
- Dir: tmpBasePath,
- Stderr: stderrWriter,
- PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error {
- // Close the writer end of the pipe to begin processing
- _ = stderrWriter.Close()
- defer func() {
- // Close the reader on return to terminate the git command if necessary
- _ = stderrReader.Close()
- }()
-
- const prefix = "error: patch failed:"
- const errorPrefix = "error: "
- const threewayFailed = "Failed to perform three-way merge..."
- const appliedPatchPrefix = "Applied patch to '"
- const withConflicts = "' with conflicts."
-
- conflicts := make(container.Set[string])
-
- // Now scan the output from the command
- scanner := bufio.NewScanner(stderrReader)
- for scanner.Scan() {
- line := scanner.Text()
- log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line)
- if strings.HasPrefix(line, prefix) {
- conflict = true
- filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0])
- conflicts.Add(filepath)
- } else if is3way && line == threewayFailed {
- conflict = true
- } else if strings.HasPrefix(line, errorPrefix) {
- conflict = true
- for _, suffix := range patchErrorSuffices {
- if strings.HasSuffix(line, suffix) {
- filepath := strings.TrimSpace(strings.TrimSuffix(line[len(errorPrefix):], suffix))
- if filepath != "" {
- conflicts.Add(filepath)
- }
- break
- }
- }
- } else if is3way && strings.HasPrefix(line, appliedPatchPrefix) && strings.HasSuffix(line, withConflicts) {
- conflict = true
- filepath := strings.TrimPrefix(strings.TrimSuffix(line, withConflicts), appliedPatchPrefix)
- if filepath != "" {
- conflicts.Add(filepath)
- }
- }
- // only list 10 conflicted files
- if len(conflicts) >= 10 {
- break
- }
- }
-
- if len(conflicts) > 0 {
- pr.ConflictedFiles = make([]string, 0, len(conflicts))
- for key := range conflicts {
- pr.ConflictedFiles = append(pr.ConflictedFiles, key)
- }
- }
-
- return nil
- },
- })
-
- // 9. Check if the found conflictedfiles is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts.
- // Note: `"err" could be non-nil` is due that if enable 3-way merge, it doesn't return any error on found conflicts.
- if len(pr.ConflictedFiles) > 0 {
- if conflict {
- pr.Status = issues_model.PullRequestStatusConflict
- log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
-
- return true, nil
- }
- } else if err != nil {
- return false, fmt.Errorf("git apply --check: %w", err)
- }
- return false, nil
+ log.Trace("Found %d files conflicted: %v", len(pr.ConflictedFiles), pr.ConflictedFiles)
+ return true, nil
}
// CheckFileProtection check file Protection
@@ -558,7 +556,7 @@ func CheckUnprotectedFiles(repo *git.Repository, oldCommitID, newCommitID string
}
// checkPullFilesProtection check if pr changed protected files and save results
-func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, gitRepo *git.Repository) error {
+func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest, testPatchCtx *testPatchContext) error {
if pr.Status == issues_model.PullRequestStatusEmpty {
pr.ChangedProtectedFiles = nil
return nil
@@ -574,7 +572,7 @@ func checkPullFilesProtection(ctx context.Context, pr *issues_model.PullRequest,
return nil
}
- pr.ChangedProtectedFiles, err = CheckFileProtection(gitRepo, pr.MergeBase, "tracking", pb.GetProtectedFilePatterns(), 10, os.Environ())
+ pr.ChangedProtectedFiles, err = CheckFileProtection(testPatchCtx.gitRepo, pr.MergeBase, testPatchCtx.headRev, pb.GetProtectedFilePatterns(), 10, testPatchCtx.env)
if err != nil && !models.IsErrFilePathProtected(err) {
return err
}
diff --git a/services/pull/patch_test.go b/services/pull/patch_test.go
new file mode 100644
index 0000000000..bcab19fc58
--- /dev/null
+++ b/services/pull/patch_test.go
@@ -0,0 +1,62 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: GPL-3.0-or-later
+
+package pull
+
+import (
+ "fmt"
+ "testing"
+
+ issues_model "forgejo.org/models/issues"
+ "forgejo.org/models/unittest"
+ "forgejo.org/modules/setting"
+
+ "github.com/stretchr/testify/assert"
+ "github.com/stretchr/testify/require"
+)
+
+func TestLoadHeadRevision(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ t.Run("AGit", func(t *testing.T) {
+ t.Run("New", func(t *testing.T) {
+ ctx := &testPatchContext{}
+ require.NoError(t, ctx.LoadHeadRevision(t.Context(), &issues_model.PullRequest{Flow: issues_model.PullRequestFlowAGit, HeadCommitID: "Commit!"}))
+
+ assert.Empty(t, ctx.env)
+ assert.Equal(t, "Commit!", ctx.headRev)
+ assert.True(t, ctx.headIsCommitID)
+ })
+ t.Run("Existing", func(t *testing.T) {
+ ctx := &testPatchContext{}
+ require.NoError(t, ctx.LoadHeadRevision(t.Context(), &issues_model.PullRequest{Flow: issues_model.PullRequestFlowAGit, Index: 371}))
+
+ assert.Empty(t, ctx.env)
+ assert.Equal(t, "refs/pull/371/head", ctx.headRev)
+ assert.False(t, ctx.headIsCommitID)
+ })
+ })
+
+ t.Run("Same repository", func(t *testing.T) {
+ ctx := &testPatchContext{}
+ require.NoError(t, ctx.LoadHeadRevision(t.Context(), unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 1})))
+
+ assert.Empty(t, ctx.env)
+ assert.Equal(t, "refs/heads/branch1", ctx.headRev)
+ assert.False(t, ctx.headIsCommitID)
+ })
+
+ t.Run("Across repository", func(t *testing.T) {
+ pr := unittest.AssertExistsAndLoadBean(t, &issues_model.PullRequest{ID: 3})
+ require.NoError(t, pr.LoadHeadRepo(t.Context()))
+
+ ctx := &testPatchContext{}
+ require.NoError(t, ctx.LoadHeadRevision(t.Context(), pr))
+
+ if assert.NotEmpty(t, ctx.env) {
+ assert.Equal(t, fmt.Sprintf("GIT_ALTERNATE_OBJECT_DIRECTORIES=%s/user13/repo11.git/objects", setting.RepoRootPath), ctx.env[len(ctx.env)-1])
+ }
+ assert.Equal(t, "0abcb056019adb8336cf9db3ad9d9cf80cd4b141", ctx.headRev)
+ assert.True(t, ctx.headIsCommitID)
+ })
+}
diff --git a/services/pull/pull.go b/services/pull/pull.go
index 18f63cf95d..26210f7156 100644
--- a/services/pull/pull.go
+++ b/services/pull/pull.go
@@ -46,22 +46,15 @@ func NewPullRequest(ctx context.Context, repo *repo_model.Repository, issue *iss
return user_model.ErrBlockedByUser
}
- prCtx, cancel, err := createTemporaryRepoForPR(ctx, pr)
+ testPatchCtx, err := testPatch(ctx, pr)
+ defer testPatchCtx.close()
if err != nil {
- if !git_model.IsErrBranchNotExist(err) {
- log.Error("CreateTemporaryRepoForPR %-v: %v", pr, err)
- }
- return err
- }
- defer cancel()
-
- if err := testPatch(ctx, prCtx, pr); err != nil {
- return err
+ return fmt.Errorf("testPatch: %w", err)
}
- divergence, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ divergence, err := git.GetDivergingCommits(ctx, testPatchCtx.gitRepo.Path, testPatchCtx.baseRev, testPatchCtx.headRev, testPatchCtx.env)
if err != nil {
- return err
+ return fmt.Errorf("GetDivergingCommits: %w", err)
}
pr.CommitsAhead = divergence.Ahead
pr.CommitsBehind = divergence.Behind
diff --git a/services/pull/review.go b/services/pull/review.go
index d61d9623b6..c740328e4c 100644
--- a/services/pull/review.go
+++ b/services/pull/review.go
@@ -6,6 +6,7 @@ package pull
import (
"context"
+ "errors"
"fmt"
"io"
"regexp"
@@ -232,7 +233,7 @@ func CreateCodeCommentKnownReviewID(ctx context.Context, doer *user_model.User,
commit, err := gitRepo.LineBlame(head, gitRepo.Path, treePath, uint(line))
if err == nil {
commitID = commit.ID.String()
- } else if !(strings.Contains(err.Error(), "exit status 128 - fatal: no such path") || notEnoughLines.MatchString(err.Error())) {
+ } else if !strings.Contains(err.Error(), "exit status 128 - fatal: no such path") && !notEnoughLines.MatchString(err.Error()) {
return nil, fmt.Errorf("LineBlame[%s, %s, %s, %d]: %w", pr.GetGitRefName(), gitRepo.Path, treePath, line, err)
}
}
@@ -387,7 +388,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string,
}
if review.Type != issues_model.ReviewTypeApprove && review.Type != issues_model.ReviewTypeReject {
- return nil, fmt.Errorf("not need to dismiss this review because it's type is not Approve or change request")
+ return nil, errors.New("not need to dismiss this review because it's type is not Approve or change request")
}
// load data for notify
@@ -397,7 +398,7 @@ func DismissReview(ctx context.Context, reviewID, repoID int64, message string,
// Check if the review's repoID is the one we're currently expecting.
if review.Issue.RepoID != repoID {
- return nil, fmt.Errorf("reviews's repository is not the same as the one we expect")
+ return nil, errors.New("reviews's repository is not the same as the one we expect")
}
issue := review.Issue
diff --git a/services/pull/temp_repo.go b/services/pull/temp_repo.go
index 1805ffc527..76ae0df018 100644
--- a/services/pull/temp_repo.go
+++ b/services/pull/temp_repo.go
@@ -103,11 +103,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
remoteRepoName := "head_repo"
baseBranch := "base"
- fetchArgs := git.TrustedCmdArgs{"--no-tags"}
- if git.CheckGitVersionAtLeast("2.25.0") == nil {
- // Writing the commit graph can be slow and is not needed here
- fetchArgs = append(fetchArgs, "--no-write-commit-graph")
- }
+ fetchArgs := git.TrustedCmdArgs{"--no-tags", "--no-write-commit-graph"}
// addCacheRepo adds git alternatives for the cacheRepoPath in the repoPath
addCacheRepo := func(repoPath, cacheRepoPath string) error {
diff --git a/services/pull/update.go b/services/pull/update.go
index 1b4b6b039d..563c11fff3 100644
--- a/services/pull/update.go
+++ b/services/pull/update.go
@@ -5,6 +5,7 @@ package pull
import (
"context"
+ "errors"
"fmt"
git_model "forgejo.org/models/git"
@@ -22,7 +23,7 @@ import (
func Update(ctx context.Context, pr *issues_model.PullRequest, doer *user_model.User, message string, rebase bool) error {
if pr.Flow == issues_model.PullRequestFlowAGit {
// TODO: update of agit flow pull request's head branch is unsupported
- return fmt.Errorf("update of agit flow pull request's head branch is unsupported")
+ return errors.New("update of agit flow pull request's head branch is unsupported")
}
pullWorkingPool.CheckIn(fmt.Sprint(pr.ID))
@@ -175,6 +176,6 @@ func GetDiverging(ctx context.Context, pr *issues_model.PullRequest) (*git.Diver
}
defer cancel()
- diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch)
+ diff, err := git.GetDivergingCommits(ctx, prCtx.tmpBasePath, baseBranch, trackingBranch, nil)
return &diff, err
}
diff --git a/services/release/release.go b/services/release/release.go
index f0682c4dca..90eb1320ed 100644
--- a/services/release/release.go
+++ b/services/release/release.go
@@ -161,17 +161,17 @@ func CreateRelease(gitRepo *git.Repository, rel *repo_model.Release, msg string,
for _, attachmentChange := range attachmentChanges {
if attachmentChange.Action != "add" {
- return fmt.Errorf("can only create new attachments when creating release")
+ return errors.New("can only create new attachments when creating release")
}
switch attachmentChange.Type {
case "attachment":
if attachmentChange.UUID == "" {
- return fmt.Errorf("new attachment should have a uuid")
+ return errors.New("new attachment should have a uuid")
}
addAttachmentUUIDs.Add(attachmentChange.UUID)
case "external":
if attachmentChange.Name == "" || attachmentChange.ExternalURL == "" {
- return fmt.Errorf("new external attachment should have a name and external url")
+ return errors.New("new external attachment should have a name and external url")
}
_, err = attachment.NewExternalAttachment(gitRepo.Ctx, &repo_model.Attachment{
@@ -186,7 +186,7 @@ func CreateRelease(gitRepo *git.Repository, rel *repo_model.Release, msg string,
}
default:
if attachmentChange.Type == "" {
- return fmt.Errorf("missing attachment type")
+ return errors.New("missing attachment type")
}
return fmt.Errorf("unknown attachment type: '%q'", attachmentChange.Type)
}
@@ -280,7 +280,7 @@ func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repo
addAttachmentUUIDs.Add(attachmentChange.UUID)
case "external":
if attachmentChange.Name == "" || attachmentChange.ExternalURL == "" {
- return fmt.Errorf("new external attachment should have a name and external url")
+ return errors.New("new external attachment should have a name and external url")
}
_, err := attachment.NewExternalAttachment(ctx, &repo_model.Attachment{
Name: attachmentChange.Name,
@@ -294,13 +294,13 @@ func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repo
}
default:
if attachmentChange.Type == "" {
- return fmt.Errorf("missing attachment type")
+ return errors.New("missing attachment type")
}
return fmt.Errorf("unknown attachment type: %q", attachmentChange.Type)
}
case "delete":
if attachmentChange.UUID == "" {
- return fmt.Errorf("attachment deletion should have a uuid")
+ return errors.New("attachment deletion should have a uuid")
}
delAttachmentUUIDs.Add(attachmentChange.UUID)
case "update":
@@ -308,7 +308,7 @@ func UpdateRelease(ctx context.Context, doer *user_model.User, gitRepo *git.Repo
updateAttachments.Add(attachmentChange)
default:
if attachmentChange.Action == "" {
- return fmt.Errorf("missing attachment action")
+ return errors.New("missing attachment action")
}
return fmt.Errorf("unknown attachment action: %q", attachmentChange.Action)
}
diff --git a/services/release/release_test.go b/services/release/release_test.go
index 66106eb606..f03b4d42b8 100644
--- a/services/release/release_test.go
+++ b/services/release/release_test.go
@@ -6,7 +6,6 @@ package release
import (
"strings"
"testing"
- "time"
"forgejo.org/models/db"
repo_model "forgejo.org/models/repo"
@@ -14,6 +13,7 @@ import (
user_model "forgejo.org/models/user"
"forgejo.org/modules/git"
"forgejo.org/modules/gitrepo"
+ "forgejo.org/modules/test"
"forgejo.org/services/attachment"
_ "forgejo.org/models/actions"
@@ -138,9 +138,9 @@ func TestRelease_Create(t *testing.T) {
}))
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, &release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
- assert.EqualValues(t, attach.Name, release.Attachments[0].Name)
- assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+ assert.Equal(t, attach.UUID, release.Attachments[0].UUID)
+ assert.Equal(t, attach.Name, release.Attachments[0].Name)
+ assert.Equal(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
release = repo_model.Release{
RepoID: repo.ID,
@@ -165,8 +165,8 @@ func TestRelease_Create(t *testing.T) {
}))
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, &release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, "test", release.Attachments[0].Name)
- assert.EqualValues(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
+ assert.Equal(t, "test", release.Attachments[0].Name)
+ assert.Equal(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
release = repo_model.Release{
RepoID: repo.ID,
@@ -219,7 +219,7 @@ func TestRelease_Update(t *testing.T) {
release, err := repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.1.1")
require.NoError(t, err)
releaseCreatedUnix := release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Note = "Changed note"
require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
@@ -243,7 +243,7 @@ func TestRelease_Update(t *testing.T) {
release, err = repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.2.1")
require.NoError(t, err)
releaseCreatedUnix = release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Title = "Changed title"
require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
release, err = repo_model.GetReleaseByID(db.DefaultContext, release.ID)
@@ -267,7 +267,7 @@ func TestRelease_Update(t *testing.T) {
release, err = repo_model.GetRelease(db.DefaultContext, repo.ID, "v1.3.1")
require.NoError(t, err)
releaseCreatedUnix = release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Title = "Changed title"
release.Note = "Changed note"
require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{}))
@@ -318,10 +318,10 @@ func TestRelease_Update(t *testing.T) {
}))
require.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, attach.Name, release.Attachments[0].Name)
- assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+ assert.Equal(t, attach.UUID, release.Attachments[0].UUID)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, attach.Name, release.Attachments[0].Name)
+ assert.Equal(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
// update the attachment name
require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
@@ -334,10 +334,10 @@ func TestRelease_Update(t *testing.T) {
release.Attachments = nil
require.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, attach.UUID, release.Attachments[0].UUID)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, "test2.txt", release.Attachments[0].Name)
- assert.EqualValues(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
+ assert.Equal(t, attach.UUID, release.Attachments[0].UUID)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, "test2.txt", release.Attachments[0].Name)
+ assert.Equal(t, attach.ExternalURL, release.Attachments[0].ExternalURL)
// delete the attachment
require.NoError(t, UpdateRelease(db.DefaultContext, user, gitRepo, release, false, []*AttachmentChange{
@@ -361,9 +361,9 @@ func TestRelease_Update(t *testing.T) {
}))
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, "test", release.Attachments[0].Name)
- assert.EqualValues(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, "test", release.Attachments[0].Name)
+ assert.Equal(t, "https://forgejo.org/", release.Attachments[0].ExternalURL)
externalAttachmentUUID := release.Attachments[0].UUID
// update the attachment name
@@ -378,10 +378,10 @@ func TestRelease_Update(t *testing.T) {
release.Attachments = nil
assert.NoError(t, repo_model.GetReleaseAttachments(db.DefaultContext, release))
assert.Len(t, release.Attachments, 1)
- assert.EqualValues(t, externalAttachmentUUID, release.Attachments[0].UUID)
- assert.EqualValues(t, release.ID, release.Attachments[0].ReleaseID)
- assert.EqualValues(t, "test2", release.Attachments[0].Name)
- assert.EqualValues(t, "https://about.gitea.com/", release.Attachments[0].ExternalURL)
+ assert.Equal(t, externalAttachmentUUID, release.Attachments[0].UUID)
+ assert.Equal(t, release.ID, release.Attachments[0].ReleaseID)
+ assert.Equal(t, "test2", release.Attachments[0].Name)
+ assert.Equal(t, "https://about.gitea.com/", release.Attachments[0].ExternalURL)
}
func TestRelease_createTag(t *testing.T) {
@@ -412,7 +412,7 @@ func TestRelease_createTag(t *testing.T) {
require.NoError(t, err)
assert.NotEmpty(t, release.CreatedUnix)
releaseCreatedUnix := release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Note = "Changed note"
_, err = createTag(db.DefaultContext, gitRepo, release, "")
require.NoError(t, err)
@@ -435,7 +435,7 @@ func TestRelease_createTag(t *testing.T) {
_, err = createTag(db.DefaultContext, gitRepo, release, "")
require.NoError(t, err)
releaseCreatedUnix = release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Title = "Changed title"
_, err = createTag(db.DefaultContext, gitRepo, release, "")
require.NoError(t, err)
@@ -458,7 +458,7 @@ func TestRelease_createTag(t *testing.T) {
_, err = createTag(db.DefaultContext, gitRepo, release, "")
require.NoError(t, err)
releaseCreatedUnix = release.CreatedUnix
- time.Sleep(2 * time.Second) // sleep 2 seconds to ensure a different timestamp
+ test.SleepTillNextSecond()
release.Title = "Changed title"
release.Note = "Changed note"
_, err = createTag(db.DefaultContext, gitRepo, release, "")
diff --git a/services/repository/archiver/archiver_test.go b/services/repository/archiver/archiver_test.go
index ec4da2404f..00d82267c9 100644
--- a/services/repository/archiver/archiver_test.go
+++ b/services/repository/archiver/archiver_test.go
@@ -36,7 +36,7 @@ func TestArchive_Basic(t *testing.T) {
bogusReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit, git.ZIP)
require.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, firstCommit+".zip", bogusReq.GetArchiveName())
+ assert.Equal(t, firstCommit+".zip", bogusReq.GetArchiveName())
// Check a series of bogus requests.
// Step 1, valid commit with a bad extension.
@@ -57,12 +57,12 @@ func TestArchive_Basic(t *testing.T) {
bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "master", git.ZIP)
require.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, "master.zip", bogusReq.GetArchiveName())
+ assert.Equal(t, "master.zip", bogusReq.GetArchiveName())
bogusReq, err = NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, "test/archive", git.ZIP)
require.NoError(t, err)
assert.NotNil(t, bogusReq)
- assert.EqualValues(t, "test-archive.zip", bogusReq.GetArchiveName())
+ assert.Equal(t, "test-archive.zip", bogusReq.GetArchiveName())
// Now two valid requests, firstCommit with valid extensions.
zipReq, err := NewRequest(ctx, ctx.Repo.Repository.ID, ctx.Repo.GitRepo, firstCommit, git.ZIP)
diff --git a/services/repository/avatar_test.go b/services/repository/avatar_test.go
index e5fcf7f239..6f28113286 100644
--- a/services/repository/avatar_test.go
+++ b/services/repository/avatar_test.go
@@ -60,7 +60,7 @@ func TestDeleteAvatar(t *testing.T) {
err = DeleteAvatar(db.DefaultContext, repo)
require.NoError(t, err)
- assert.Equal(t, "", repo.Avatar)
+ assert.Empty(t, repo.Avatar)
}
func TestTemplateGenerateAvatar(t *testing.T) {
diff --git a/services/repository/branch.go b/services/repository/branch.go
index 689f35803d..bc739825a5 100644
--- a/services/repository/branch.go
+++ b/services/repository/branch.go
@@ -28,6 +28,7 @@ import (
"forgejo.org/modules/timeutil"
"forgejo.org/modules/util"
webhook_module "forgejo.org/modules/webhook"
+ actions_service "forgejo.org/services/actions"
notify_service "forgejo.org/services/notify"
pull_service "forgejo.org/services/pull"
files_service "forgejo.org/services/repository/files"
@@ -250,7 +251,7 @@ func SyncBranchesToDB(ctx context.Context, repoID, pusherID int64, branchNames,
// For other batches, it will hit optimization 4.
if len(branchNames) != len(commitIDs) {
- return fmt.Errorf("branchNames and commitIDs length not match")
+ return errors.New("branchNames and commitIDs length not match")
}
return db.WithTx(ctx, func(ctx context.Context) error {
@@ -377,7 +378,7 @@ func RenameBranch(ctx context.Context, repo *repo_model.Repository, doer *user_m
log.Error("DeleteCronTaskByRepo: %v", err)
}
// cancel running cron jobs of this repository and delete old schedules
- if err := actions_model.CancelPreviousJobs(
+ if err := actions_service.CancelPreviousJobs(
ctx,
repo.ID,
from,
@@ -578,7 +579,7 @@ func SetRepoDefaultBranch(ctx context.Context, repo *repo_model.Repository, gitR
log.Error("DeleteCronTaskByRepo: %v", err)
}
// cancel running cron jobs of this repository and delete old schedules
- if err := actions_model.CancelPreviousJobs(
+ if err := actions_service.CancelPreviousJobs(
ctx,
repo.ID,
oldDefaultBranchName,
diff --git a/services/repository/contributors_graph.go b/services/repository/contributors_graph.go
index ad4cc400cb..1805bd5960 100644
--- a/services/repository/contributors_graph.go
+++ b/services/repository/contributors_graph.go
@@ -111,7 +111,7 @@ func GetContributorStats(ctx context.Context, cache cache.Cache, repo *repo_mode
var cachedStats map[string]*ContributorData
return cachedStats, json.Unmarshal([]byte(v), &cachedStats)
default:
- return nil, fmt.Errorf("unexpected type in cache detected")
+ return nil, errors.New("unexpected type in cache detected")
}
}
diff --git a/services/repository/contributors_graph_test.go b/services/repository/contributors_graph_test.go
index 927c950bec..45af85272d 100644
--- a/services/repository/contributors_graph_test.go
+++ b/services/repository/contributors_graph_test.go
@@ -53,14 +53,14 @@ func TestRepository_ContributorsGraph(t *testing.T) {
keys = append(keys, k)
}
slices.Sort(keys)
- assert.EqualValues(t, []string{
+ assert.Equal(t, []string{
"ethantkoenig@gmail.com",
"jimmy.praet@telenet.be",
"jon@allspice.io",
"total", // generated summary
}, keys)
- assert.EqualValues(t, &ContributorData{
+ assert.Equal(t, &ContributorData{
Name: "Ethan Koenig",
AvatarLink: "/assets/img/avatar_default.png",
TotalCommits: 1,
@@ -73,7 +73,7 @@ func TestRepository_ContributorsGraph(t *testing.T) {
},
},
}, data["ethantkoenig@gmail.com"])
- assert.EqualValues(t, &ContributorData{
+ assert.Equal(t, &ContributorData{
Name: "Total",
AvatarLink: "",
TotalCommits: 3,
diff --git a/services/repository/create_test.go b/services/repository/create_test.go
index 7eb3c0f805..0a6c34b6fe 100644
--- a/services/repository/create_test.go
+++ b/services/repository/create_test.go
@@ -147,3 +147,13 @@ func TestIncludesAllRepositoriesTeams(t *testing.T) {
}
require.NoError(t, organization.DeleteOrganization(db.DefaultContext, org), "DeleteOrganization")
}
+
+func TestCreateRepository(t *testing.T) {
+ require.NoError(t, unittest.PrepareTestDatabase())
+ user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
+
+ r, err := CreateRepositoryDirectly(db.DefaultContext, user, user, CreateRepoOptions{Name: "repo-last"})
+ require.NoError(t, err)
+ require.NotNil(t, r.Topics)
+ require.Empty(t, r.Topics)
+}
diff --git a/services/repository/delete.go b/services/repository/delete.go
index 7c83ba12cd..f4124fb9e2 100644
--- a/services/repository/delete.go
+++ b/services/repository/delete.go
@@ -1,4 +1,5 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package repository
@@ -89,6 +90,11 @@ func DeleteRepositoryDirectly(ctx context.Context, doer *user_model.User, repoID
}
}
+ // If the repository was reported as abusive, a shadow copy should be created before deletion.
+ if err := repo_model.IfNeededCreateShadowCopyForRepository(ctx, repo, false); err != nil {
+ return err
+ }
+
if cnt, err := sess.ID(repoID).Delete(&repo_model.Repository{}); err != nil {
return err
} else if cnt != 1 {
diff --git a/services/repository/files/cherry_pick.go b/services/repository/files/cherry_pick.go
index b6d54c4086..0e88a29230 100644
--- a/services/repository/files/cherry_pick.go
+++ b/services/repository/files/cherry_pick.go
@@ -5,6 +5,7 @@ package files
import (
"context"
+ "errors"
"fmt"
"strings"
@@ -79,21 +80,33 @@ func CherryPick(ctx context.Context, repo *repo_model.Repository, doer *user_mod
right, base = base, right
}
- description := fmt.Sprintf("CherryPick %s onto %s", right, opts.OldBranch)
- conflict, _, err := pull.AttemptThreeWayMerge(ctx,
- t.basePath, t.gitRepo, base, opts.LastCommitID, right, description)
- if err != nil {
- return nil, fmt.Errorf("failed to three-way merge %s onto %s: %w", right, opts.OldBranch, err)
- }
+ var treeHash string
+ if git.SupportGitMergeTree {
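+ // git merge-tree --write-tree produces the merged tree directly in the object
+ // database, so no index or work tree has to be prepared before committing it.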
+ var conflict bool
+ treeHash, conflict, _, err = pull.MergeTree(ctx, t.gitRepo, base, opts.LastCommitID, right, nil)
+ if err != nil {
+ return nil, fmt.Errorf("failed to three-way merge %s onto %s: %w", right, opts.OldBranch, err)
+ }
- if conflict {
- return nil, fmt.Errorf("failed to merge due to conflicts")
- }
+ if conflict {
+ return nil, errors.New("failed to merge due to conflicts")
+ }
+ } else {
+ description := fmt.Sprintf("CherryPick %s onto %s", right, opts.OldBranch)
+ conflict, _, err := pull.AttemptThreeWayMerge(ctx, t.gitRepo, base, opts.LastCommitID, right, description)
+ if err != nil {
+ return nil, fmt.Errorf("failed to three-way merge %s onto %s: %w", right, opts.OldBranch, err)
+ }
- treeHash, err := t.WriteTree()
- if err != nil {
- // likely non-sensical tree due to merge conflicts...
- return nil, err
+ if conflict {
+ return nil, errors.New("failed to merge due to conflicts")
+ }
+
+ treeHash, err = t.WriteTree()
+ if err != nil {
+ // likely non-sensical tree due to merge conflicts...
+ return nil, err
+ }
}
// Now commit the tree
diff --git a/services/repository/files/commit.go b/services/repository/files/commit.go
index 0c0671429b..43c9048aaf 100644
--- a/services/repository/files/commit.go
+++ b/services/repository/files/commit.go
@@ -1,4 +1,5 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
+// Copyright 2025 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package files
@@ -14,7 +15,7 @@ import (
// CountDivergingCommits determines how many commits a branch is ahead or behind the repository's base branch
func CountDivergingCommits(ctx context.Context, repo *repo_model.Repository, branch string) (*git.DivergeObject, error) {
- divergence, err := git.GetDivergingCommits(ctx, repo.RepoPath(), repo.DefaultBranch, branch)
+ divergence, err := git.GetDivergingCommits(ctx, repo.RepoPath(), repo.DefaultBranch, branch, nil)
if err != nil {
return nil, err
}
@@ -38,7 +39,7 @@ func GetPayloadCommitVerification(ctx context.Context, commit *git.Commit) *stru
verification.Verified = commitVerification.Verified
verification.Reason = commitVerification.Reason
if verification.Reason == "" && !verification.Verified {
- verification.Reason = "gpg.error.not_signed_commit"
+ verification.Reason = asymkey_model.NotSigned
}
return verification
}
diff --git a/services/repository/files/content.go b/services/repository/files/content.go
index 3eb3049f12..5a6006e9f2 100644
--- a/services/repository/files/content.go
+++ b/services/repository/files/content.go
@@ -5,6 +5,7 @@ package files
import (
"context"
+ "errors"
"fmt"
"net/url"
"path"
@@ -178,12 +179,13 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref
// All content types have these fields in populated
contentsResponse := &api.ContentsResponse{
- Name: entry.Name(),
- Path: treePath,
- SHA: entry.ID.String(),
- LastCommitSHA: lastCommit.ID.String(),
- Size: entry.Size(),
- URL: &selfURLString,
+ Name: entry.Name(),
+ Path: treePath,
+ SHA: entry.ID.String(),
+ LastCommitSHA: lastCommit.ID.String(),
+ LastCommitWhen: lastCommit.Committer.When,
+ Size: entry.Size(),
+ URL: &selfURLString,
Links: &api.FileLinksResponse{
Self: &selfURLString,
},
@@ -204,7 +206,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref
} else if entry.IsLink() {
contentsResponse.Type = string(ContentTypeLink)
// The target of a symlink file is the content of the file
- targetFromContent, err := entry.Blob().GetBlobContent(1024)
+ targetFromContent, err := entry.LinkTarget()
if err != nil {
return nil, err
}
@@ -249,20 +251,35 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref
return contentsResponse, nil
}
-// GetBlobBySHA get the GitBlobResponse of a repository using a sha hash.
-func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlobResponse, error) {
+// GetBlobsBySHA gets multiple GitBlobs of a repository by sha hash.
+func GetBlobsBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, shas []string) ([]*api.GitBlob, error) {
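+ // Silently cap the request at the configured API response limit; extra SHAs are ignored.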
+ if len(shas) > setting.API.MaxResponseItems {
+ shas = shas[:setting.API.MaxResponseItems]
+ }
+
+ blobs := make([]*api.GitBlob, 0, len(shas))
+ for _, sha := range shas {
+ blob, err := GetBlobBySHA(ctx, repo, gitRepo, sha)
+ if err != nil {
+ return nil, err
+ }
+ blobs = append(blobs, blob)
+ }
+ return blobs, nil
+}
+
+// GetBlobBySHA get the GitBlob of a repository using a sha hash.
+func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git.Repository, sha string) (*api.GitBlob, error) {
gitBlob, err := gitRepo.GetBlob(sha)
if err != nil {
return nil, err
}
- content := ""
- if gitBlob.Size() <= setting.API.DefaultMaxBlobSize {
- content, err = gitBlob.GetBlobContentBase64()
- if err != nil {
- return nil, err
- }
+ content, err := gitBlob.GetContentBase64(setting.API.DefaultMaxBlobSize)
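+ // Blobs above the configured size limit are returned without content; only
+ // other errors abort the request.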
+ if err != nil && !errors.As(err, &git.BlobTooLargeError{}) {
+ return nil, err
}
- return &api.GitBlobResponse{
+
+ return &api.GitBlob{
SHA: gitBlob.ID.String(),
URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()),
Size: gitBlob.Size(),
diff --git a/services/repository/files/content_test.go b/services/repository/files/content_test.go
index ca2f861c0b..8fc8f56b4f 100644
--- a/services/repository/files/content_test.go
+++ b/services/repository/files/content_test.go
@@ -5,6 +5,7 @@ package files
import (
"testing"
+ "time"
"forgejo.org/models/db"
repo_model "forgejo.org/models/repo"
@@ -33,18 +34,19 @@ func getExpectedReadmeContentsResponse() *api.ContentsResponse {
gitURL := "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/" + sha
downloadURL := "https://try.gitea.io/user2/repo1/raw/branch/master/" + treePath
return &api.ContentsResponse{
- Name: treePath,
- Path: treePath,
- SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
- LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
- Type: "file",
- Size: 30,
- Encoding: &encoding,
- Content: &content,
- URL: &selfURL,
- HTMLURL: &htmlURL,
- GitURL: &gitURL,
- DownloadURL: &downloadURL,
+ Name: treePath,
+ Path: treePath,
+ SHA: "4b4851ad51df6a7d9f25c979345979eaeb5b349f",
+ LastCommitSHA: "65f1bf27bc3bf70f64657658635e66094edbcb4d",
+ LastCommitWhen: time.Date(2017, time.March, 19, 16, 47, 59, 0, time.FixedZone("", -14400)),
+ Type: "file",
+ Size: 30,
+ Encoding: &encoding,
+ Content: &content,
+ URL: &selfURL,
+ HTMLURL: &htmlURL,
+ GitURL: &gitURL,
+ DownloadURL: &downloadURL,
Links: &api.FileLinksResponse{
Self: &selfURL,
GitURL: &gitURL,
@@ -64,13 +66,13 @@ func TestGetContents(t *testing.T) {
t.Run("Get README.md contents with GetContents(ctx, )", func(t *testing.T) {
fileContentResponse, err := GetContents(db.DefaultContext, repo, treePath, ref, false)
- assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ assert.Equal(t, expectedContentsResponse, fileContentResponse)
require.NoError(t, err)
})
t.Run("Get README.md contents with ref as empty string (should then use the repo's default branch) with GetContents(ctx, )", func(t *testing.T) {
fileContentResponse, err := GetContents(db.DefaultContext, repo, treePath, "", false)
- assert.EqualValues(t, expectedContentsResponse, fileContentResponse)
+ assert.Equal(t, expectedContentsResponse, fileContentResponse)
require.NoError(t, err)
})
}
@@ -190,7 +192,7 @@ func TestGetBlobBySHA(t *testing.T) {
defer gitRepo.Close()
gbr, err := GetBlobBySHA(db.DefaultContext, repo, gitRepo, "65f1bf27bc3bf70f64657658635e66094edbcb4d")
- expectedGBR := &api.GitBlobResponse{
+ expectedGBR := &api.GitBlob{
Content: "dHJlZSAyYTJmMWQ0NjcwNzI4YTJlMTAwNDllMzQ1YmQ3YTI3NjQ2OGJlYWI2CmF1dGhvciB1c2VyMSA8YWRkcmVzczFAZXhhbXBsZS5jb20+IDE0ODk5NTY0NzkgLTA0MDAKY29tbWl0dGVyIEV0aGFuIEtvZW5pZyA8ZXRoYW50a29lbmlnQGdtYWlsLmNvbT4gMTQ4OTk1NjQ3OSAtMDQwMAoKSW5pdGlhbCBjb21taXQK",
Encoding: "base64",
URL: "https://try.gitea.io/api/v1/repos/user2/repo1/git/blobs/65f1bf27bc3bf70f64657658635e66094edbcb4d",
@@ -200,3 +202,43 @@ func TestGetBlobBySHA(t *testing.T) {
require.NoError(t, err)
assert.Equal(t, expectedGBR, gbr)
}
+
+func TestGetBlobsBySHA(t *testing.T) {
+ unittest.PrepareTestEnv(t)
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2})
+
+ gitRepo, err := gitrepo.OpenRepository(db.DefaultContext, repo)
+ require.NoError(t, err)
+ defer gitRepo.Close()
+
+ gbr, err := GetBlobsBySHA(db.DefaultContext, repo, gitRepo, []string{
+ "ea82fc8777a24b07c26b3a4bf4e2742c03733eab", // Home.md
+ "6395b68e1feebb1e4c657b4f9f6ba2676a283c0b", // line.svg
+ "26f842bcad37fa40a1bb34cbb5ee219ee35d863d", // test.xml
+ })
+ expectedGBR := []*api.GitBlob{
+ {
+ Content: "IyBIb21lIHBhZ2UKClRoaXMgaXMgdGhlIGhvbWUgcGFnZSEK",
+ Encoding: "base64",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo2/git/blobs/ea82fc8777a24b07c26b3a4bf4e2742c03733eab",
+ SHA: "ea82fc8777a24b07c26b3a4bf4e2742c03733eab",
+ Size: 36,
+ },
+ {
+ Content: "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHN2ZwogICB4bWxuczpzdmc9Imh0dHA6Ly93d3cudzMub3JnLzIwMDAvc3ZnIgogICB4bWxucz0iaHR0cDovL3d3dy53My5vcmcvMjAwMC9zdmciCiAgIHdpZHRoPSIxMjgiCiAgIGhlaWdodD0iMTI4IgogICB2aWV3Qm94PSIwIDAgMTI4IDEyOCI+CgogIDxsaW5lIHgxPSIwIiB5MT0iNyIgeDI9IjEwIiB5Mj0iNyIgc3Ryb2tlLXdpZHRoPSIxLjUiLz4KPC9zdmc+",
+ Encoding: "base64",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo2/git/blobs/6395b68e1feebb1e4c657b4f9f6ba2676a283c0b",
+ SHA: "6395b68e1feebb1e4c657b4f9f6ba2676a283c0b",
+ Size: 246,
+ },
+ {
+ Content: "PD94bWwgdmVyc2lvbj0iMS4wIiBlbmNvZGluZz0iVVRGLTgiPz4KPHRlc3Q+VGhpcyBpcyBYTUw8L3Rlc3Q+Cg==",
+ Encoding: "base64",
+ URL: "https://try.gitea.io/api/v1/repos/user2/repo2/git/blobs/26f842bcad37fa40a1bb34cbb5ee219ee35d863d",
+ SHA: "26f842bcad37fa40a1bb34cbb5ee219ee35d863d",
+ Size: 64,
+ },
+ }
+ require.NoError(t, err)
+ assert.Equal(t, expectedGBR, gbr)
+}
diff --git a/services/repository/files/diff_test.go b/services/repository/files/diff_test.go
index d6265273c7..67c63803d3 100644
--- a/services/repository/files/diff_test.go
+++ b/services/repository/files/diff_test.go
@@ -124,7 +124,7 @@ func TestGetDiffPreview(t *testing.T) {
require.NoError(t, err)
bs, err := json.Marshal(diff)
require.NoError(t, err)
- assert.EqualValues(t, string(expectedBs), string(bs))
+ assert.Equal(t, string(expectedBs), string(bs))
})
t.Run("empty branch, same results", func(t *testing.T) {
@@ -134,7 +134,7 @@ func TestGetDiffPreview(t *testing.T) {
require.NoError(t, err)
bs, err := json.Marshal(diff)
require.NoError(t, err)
- assert.EqualValues(t, expectedBs, bs)
+ assert.Equal(t, expectedBs, bs)
})
}
diff --git a/services/repository/files/file.go b/services/repository/files/file.go
index 810c60163d..5b93258840 100644
--- a/services/repository/files/file.go
+++ b/services/repository/files/file.go
@@ -5,7 +5,7 @@ package files
import (
"context"
- "fmt"
+ "errors"
"net/url"
"strings"
"time"
@@ -50,10 +50,10 @@ func GetFileResponseFromFilesResponse(filesResponse *api.FilesResponse, index in
// GetFileCommitResponse Constructs a FileCommitResponse from a Commit object
func GetFileCommitResponse(repo *repo_model.Repository, commit *git.Commit) (*api.FileCommitResponse, error) {
if repo == nil {
- return nil, fmt.Errorf("repo cannot be nil")
+ return nil, errors.New("repo cannot be nil")
}
if commit == nil {
- return nil, fmt.Errorf("commit cannot be nil")
+ return nil, errors.New("commit cannot be nil")
}
commitURL, _ := url.Parse(repo.APIURL() + "/git/commits/" + url.PathEscape(commit.ID.String()))
commitTreeURL, _ := url.Parse(repo.APIURL() + "/git/trees/" + url.PathEscape(commit.Tree.ID.String()))
@@ -104,36 +104,35 @@ func GetAuthorAndCommitterUsers(author, committer *IdentityOptions, doer *user_m
// then we use bogus User objects for them to store their FullName and Email.
// If only one of the two are provided, we set both of them to it.
// If neither are provided, both are the doer.
- if committer != nil && committer.Email != "" {
- if doer != nil && strings.EqualFold(doer.Email, committer.Email) {
- committerUser = doer // the committer is the doer, so will use their user object
- if committer.Name != "" {
- committerUser.FullName = committer.Name
+ getUser := func(identity *IdentityOptions) *user_model.User {
+ if identity == nil || identity.Email == "" {
+ return nil
+ }
+
+ if doer != nil && strings.EqualFold(doer.Email, identity.Email) {
+ user := doer // the identity matches the doer, so use their user object
+ if identity.Name != "" {
+ user.FullName = identity.Name
}
// Use the provided email and not revert to placeholder mail.
- committerUser.KeepEmailPrivate = false
- } else {
- committerUser = &user_model.User{
- FullName: committer.Name,
- Email: committer.Email,
- }
- }
- }
- if author != nil && author.Email != "" {
- if doer != nil && strings.EqualFold(doer.Email, author.Email) {
- authorUser = doer // the author is the doer, so will use their user object
- if authorUser.Name != "" {
- authorUser.FullName = author.Name
- }
- // Use the provided email and not revert to placeholder mail.
- authorUser.KeepEmailPrivate = false
- } else {
- authorUser = &user_model.User{
- FullName: author.Name,
- Email: author.Email,
- }
+ user.KeepEmailPrivate = false
+ return user
+ }
+
+ var id int64
+ if doer != nil {
+ id = doer.ID
+ }
+ return &user_model.User{
+ ID: id, // Needed to ensure the doer is checked to pass rules for instance signing of CRUD actions.
+ FullName: identity.Name,
+ Email: identity.Email,
}
}
+
+ committerUser = getUser(committer)
+ authorUser = getUser(author)
+
if authorUser == nil {
if committerUser != nil {
authorUser = committerUser // No valid author was given so use the committer
diff --git a/services/repository/files/file_test.go b/services/repository/files/file_test.go
index db2f4403f4..169cafba0d 100644
--- a/services/repository/files/file_test.go
+++ b/services/repository/files/file_test.go
@@ -14,13 +14,13 @@ func TestCleanUploadFileName(t *testing.T) {
name := "this/is/test"
cleanName := CleanUploadFileName(name)
expectedCleanName := name
- assert.EqualValues(t, expectedCleanName, cleanName)
+ assert.Equal(t, expectedCleanName, cleanName)
})
t.Run("Clean a .git path", func(t *testing.T) {
name := "this/is/test/.git"
cleanName := CleanUploadFileName(name)
expectedCleanName := ""
- assert.EqualValues(t, expectedCleanName, cleanName)
+ assert.Equal(t, expectedCleanName, cleanName)
})
}
diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go
index 5b1dd65b5a..18b5226c02 100644
--- a/services/repository/files/patch.go
+++ b/services/repository/files/patch.go
@@ -147,11 +147,7 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
stdout := &strings.Builder{}
stderr := &strings.Builder{}
- cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
- if git.CheckGitVersionAtLeast("2.32") == nil {
- cmdApply.AddArguments("-3")
- }
-
+ cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary", "-3")
if err := cmdApply.Run(&git.RunOpts{
Dir: t.basePath,
Stdout: stdout,
diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go
index b3aadbc6cb..64d3e5887d 100644
--- a/services/repository/files/temp_repo.go
+++ b/services/repository/files/temp_repo.go
@@ -6,6 +6,7 @@ package files
import (
"bytes"
"context"
+ "errors"
"fmt"
"io"
"os"
@@ -368,7 +369,7 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) {
// GetBranchCommit Gets the commit object of the given branch
func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit, error) {
if t.gitRepo == nil {
- return nil, fmt.Errorf("repository has not been cloned")
+ return nil, errors.New("repository has not been cloned")
}
return t.gitRepo.GetBranchCommit(branch)
}
@@ -376,7 +377,7 @@ func (t *TemporaryUploadRepository) GetBranchCommit(branch string) (*git.Commit,
// GetCommit Gets the commit object of the given commit ID
func (t *TemporaryUploadRepository) GetCommit(commitID string) (*git.Commit, error) {
if t.gitRepo == nil {
- return nil, fmt.Errorf("repository has not been cloned")
+ return nil, errors.New("repository has not been cloned")
}
return t.gitRepo.GetCommit(commitID)
}
diff --git a/services/repository/files/tree_test.go b/services/repository/files/tree_test.go
index 7865fcf2e2..5cd628722b 100644
--- a/services/repository/files/tree_test.go
+++ b/services/repository/files/tree_test.go
@@ -48,5 +48,5 @@ func TestGetTreeBySHA(t *testing.T) {
TotalCount: 1,
}
- assert.EqualValues(t, expectedTree, tree)
+ assert.Equal(t, expectedTree, tree)
}
diff --git a/services/repository/files/update.go b/services/repository/files/update.go
index 5e8834c6de..8fb9644fa4 100644
--- a/services/repository/files/update.go
+++ b/services/repository/files/update.go
@@ -193,28 +193,34 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
if hasOldBranch {
- // Get the commit of the original branch
- commit, err := t.GetBranchCommit(opts.OldBranch)
+ // Get the current commit of the original branch
+ actualBaseCommit, err := t.GetBranchCommit(opts.OldBranch)
if err != nil {
return nil, err // Couldn't get a commit for the branch
}
- // Assigned LastCommitID in opts if it hasn't been set
- if opts.LastCommitID == "" {
- opts.LastCommitID = commit.ID.String()
- } else {
- lastCommitID, err := t.gitRepo.ConvertToGitID(opts.LastCommitID)
+ var lastKnownCommit git.ObjectID // when nil, the sha provided in the opts.Files must match the current blob-sha
+ if opts.OldBranch != opts.NewBranch {
+ // when creating a new branch, ignore if a file has been changed in the meantime
+ // (such changes will be visible when doing the merge)
+ lastKnownCommit = actualBaseCommit.ID
+ } else if opts.LastCommitID != "" {
+ lastKnownCommit, err = t.gitRepo.ConvertToGitID(opts.LastCommitID)
if err != nil {
return nil, fmt.Errorf("ConvertToSHA1: Invalid last commit ID: %w", err)
}
- opts.LastCommitID = lastCommitID.String()
}
for _, file := range opts.Files {
- if err := handleCheckErrors(file, commit, opts); err != nil {
+ if err := handleCheckErrors(file, actualBaseCommit, lastKnownCommit); err != nil {
return nil, err
}
}
+
+ if opts.LastCommitID == "" {
+ // needed for t.CommitTree
+ opts.LastCommitID = actualBaseCommit.ID.String()
+ }
}
contentStore := lfs.NewContentStore()
@@ -277,9 +283,9 @@ func ChangeRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use
}
// handles the check for various issues for ChangeRepoFiles
-func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRepoFilesOptions) error {
+func handleCheckErrors(file *ChangeRepoFile, actualBaseCommit *git.Commit, lastKnownCommit git.ObjectID) error {
if file.Operation == "update" || file.Operation == "delete" {
- fromEntry, err := commit.GetTreeEntryByPath(file.Options.fromTreePath)
+ fromEntry, err := actualBaseCommit.GetTreeEntryByPath(file.Options.fromTreePath)
if err != nil {
return err
}
@@ -292,22 +298,22 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep
CurrentSHA: fromEntry.ID.String(),
}
}
- } else if opts.LastCommitID != "" {
- // If a lastCommitID was given and it doesn't match the commitID of the head of the branch throw
- // an error, but only if we aren't creating a new branch.
- if commit.ID.String() != opts.LastCommitID && opts.OldBranch == opts.NewBranch {
- if changed, err := commit.FileChangedSinceCommit(file.Options.treePath, opts.LastCommitID); err != nil {
+ } else if lastKnownCommit != nil {
+ if actualBaseCommit.ID.String() != lastKnownCommit.String() {
+ // If a lastKnownCommit was given and it doesn't match the actualBaseCommit,
+ // check if the file has been changed in between
+ if changed, err := actualBaseCommit.FileChangedSinceCommit(file.Options.treePath, lastKnownCommit.String()); err != nil {
return err
} else if changed {
return models.ErrCommitIDDoesNotMatch{
- GivenCommitID: opts.LastCommitID,
- CurrentCommitID: opts.LastCommitID,
+ GivenCommitID: lastKnownCommit.String(),
+ CurrentCommitID: actualBaseCommit.ID.String(),
}
}
- // The file wasn't modified, so we are good to delete it
+ // The file wasn't modified, so we are good to update it
}
} else {
- // When updating a file, a lastCommitID or SHA needs to be given to make sure other commits
+ // When updating a file, a lastKnownCommit or SHA needs to be given to make sure other commits
// haven't been made. We throw an error if one wasn't provided.
return models.ErrSHAOrCommitIDNotProvided{}
}
@@ -322,7 +328,7 @@ func handleCheckErrors(file *ChangeRepoFile, commit *git.Commit, opts *ChangeRep
subTreePath := ""
for index, part := range treePathParts {
subTreePath = path.Join(subTreePath, part)
- entry, err := commit.GetTreeEntryByPath(subTreePath)
+ entry, err := actualBaseCommit.GetTreeEntryByPath(subTreePath)
if err != nil {
if git.IsErrNotExist(err) {
// Means there is no item with that name, so we're good
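
The lastKnownCommit rule introduced above can be condensed into a small decision sketch. The helper below is illustrative only (its name and plain-string parameters are made up); it mirrors the branch/LastCommitID cases now handled in ChangeRepoFiles:

    package main

    import "fmt"

    // pickLastKnownCommit mirrors the new rule: when targeting a new branch the
    // branch head itself is used (concurrent edits are resolved at merge time);
    // on the same branch an explicit LastCommitID is honoured; an empty result
    // means handleCheckErrors falls back to the per-file blob SHAs.
    func pickLastKnownCommit(oldBranch, newBranch, lastCommitID, branchHead string) string {
        switch {
        case oldBranch != newBranch:
            return branchHead
        case lastCommitID != "":
            return lastCommitID
        default:
            return ""
        }
    }

    func main() {
        fmt.Println(pickLastKnownCommit("main", "feature", "", "abc123"))      // abc123
        fmt.Println(pickLastKnownCommit("main", "main", "def456", "abc123"))   // def456
        fmt.Println(pickLastKnownCommit("main", "main", "", "abc123"))         // "" -> per-file SHA check
    }
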
diff --git a/services/repository/fork_test.go b/services/repository/fork_test.go
index 227dd1850e..6de241e4d4 100644
--- a/services/repository/fork_test.go
+++ b/services/repository/fork_test.go
@@ -11,6 +11,7 @@ import (
user_model "forgejo.org/models/user"
"forgejo.org/modules/git"
"forgejo.org/modules/setting"
+ "forgejo.org/modules/test"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -36,7 +37,7 @@ func TestForkRepository(t *testing.T) {
assert.False(t, repo_model.IsErrReachLimitOfRepo(err))
// change AllowForkWithoutMaximumLimit to false for the test
- setting.Repository.AllowForkWithoutMaximumLimit = false
+ defer test.MockVariableValue(&setting.Repository.AllowForkWithoutMaximumLimit, false)()
// user has reached maximum limit of repositories
user.MaxRepoCreation = 0
fork2, err := ForkRepositoryAndUpdates(git.DefaultContext, user, user, ForkRepoOptions{
diff --git a/services/repository/generate.go b/services/repository/generate.go
index 9aeb057c3d..e23e294de1 100644
--- a/services/repository/generate.go
+++ b/services/repository/generate.go
@@ -43,12 +43,8 @@ type expansion struct {
var defaultTransformers = []transformer{
{Name: "SNAKE", Transform: xstrings.ToSnakeCase},
{Name: "KEBAB", Transform: xstrings.ToKebabCase},
- // as of xstrings v1.5.0 the CAMEL & PASCAL workarounds are no longer necessary
- // and can be removed https://codeberg.org/forgejo/forgejo/pulls/4050
- {Name: "CAMEL", Transform: func(str string) string {
- return xstrings.FirstRuneToLower(xstrings.ToCamelCase(str))
- }},
- {Name: "PASCAL", Transform: xstrings.ToCamelCase},
+ {Name: "CAMEL", Transform: xstrings.ToCamelCase},
+ {Name: "PASCAL", Transform: xstrings.ToPascalCase},
{Name: "LOWER", Transform: strings.ToLower},
{Name: "UPPER", Transform: strings.ToUpper},
{Name: "TITLE", Transform: util.ToTitleCase},
diff --git a/services/repository/generate_test.go b/services/repository/generate_test.go
index b0f97d0ffb..2eb3a55e96 100644
--- a/services/repository/generate_test.go
+++ b/services/repository/generate_test.go
@@ -65,3 +65,30 @@ func TestFileNameSanitize(t *testing.T) {
assert.Equal(t, "_", fileNameSanitize("\u0000"))
assert.Equal(t, "目标", fileNameSanitize("目标"))
}
+
+func TestTransformers(t *testing.T) {
+ input := "Foo_Forgejo-BAR"
+
+ tests := []struct {
+ name string
+ expected string
+ }{
+ {"SNAKE", "foo_forgejo_bar"},
+ {"KEBAB", "foo-forgejo-bar"},
+ {"CAMEL", "fooForgejoBar"},
+ {"PASCAL", "FooForgejoBar"},
+ {"LOWER", "foo_forgejo-bar"},
+ {"UPPER", "FOO_FORGEJO-BAR"},
+ {"TITLE", "Foo_forgejo-Bar"},
+ }
+
+ for i, tt := range tests {
+ t.Run(tt.name, func(t *testing.T) {
+ transform := defaultTransformers[i]
+ assert.Equal(t, tt.name, transform.Name)
+
+ got := transform.Transform(input)
+ assert.Equal(t, tt.expected, got)
+ })
+ }
+}
diff --git a/services/repository/lfs.go b/services/repository/lfs.go
index 43acb8ee6c..2e090290a7 100644
--- a/services/repository/lfs.go
+++ b/services/repository/lfs.go
@@ -76,7 +76,7 @@ func GarbageCollectLFSMetaObjectsForRepo(ctx context.Context, repo *repo_model.R
err = git_model.IterateLFSMetaObjectsForRepo(ctx, repo.ID, func(ctx context.Context, metaObject *git_model.LFSMetaObject) error {
total++
- pointerSha := git.ComputeBlobHash(objectFormat, []byte(metaObject.Pointer.StringContent()))
+ pointerSha := git.ComputeBlobHash(objectFormat, []byte(metaObject.StringContent()))
if gitRepo.IsObjectExist(pointerSha.String()) {
return git_model.MarkLFSMetaObject(ctx, metaObject.ID)
diff --git a/services/repository/push.go b/services/repository/push.go
index 53574a7d93..eaedd80e1f 100644
--- a/services/repository/push.go
+++ b/services/repository/push.go
@@ -66,7 +66,7 @@ func PushUpdates(opts []*repo_module.PushUpdateOptions) error {
for _, opt := range opts {
if opt.IsNewRef() && opt.IsDelRef() {
- return fmt.Errorf("Old and new revisions are both NULL")
+ return errors.New("Old and new revisions are both NULL")
}
}
diff --git a/services/repository/repository.go b/services/repository/repository.go
index a2620740b1..41f3a96dd1 100644
--- a/services/repository/repository.go
+++ b/services/repository/repository.go
@@ -6,6 +6,7 @@ package repository
import (
"context"
+ "errors"
"fmt"
"forgejo.org/models/db"
@@ -72,10 +73,10 @@ func PushCreateRepo(ctx context.Context, authUser, owner *user_model.User, repoN
if ok, err := organization.CanCreateOrgRepo(ctx, owner.ID, authUser.ID); err != nil {
return nil, err
} else if !ok {
- return nil, fmt.Errorf("cannot push-create repository for org")
+ return nil, errors.New("cannot push-create repository for org")
}
} else if authUser.ID != owner.ID {
- return nil, fmt.Errorf("cannot push-create repository for another user")
+ return nil, errors.New("cannot push-create repository for another user")
}
}
diff --git a/services/repository/setting.go b/services/repository/setting.go
index c127f3129e..68cdfc370b 100644
--- a/services/repository/setting.go
+++ b/services/repository/setting.go
@@ -7,7 +7,6 @@ import (
"context"
"slices"
- actions_model "forgejo.org/models/actions"
"forgejo.org/models/db"
repo_model "forgejo.org/models/repo"
"forgejo.org/models/unit"
@@ -29,7 +28,7 @@ func UpdateRepositoryUnits(ctx context.Context, repo *repo_model.Repository, uni
}
if slices.Contains(deleteUnitTypes, unit.TypeActions) {
- if err := actions_model.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
+ if err := actions_service.CleanRepoScheduleTasks(ctx, repo, true); err != nil {
log.Error("CleanRepoScheduleTasks: %v", err)
}
}
diff --git a/services/repository/sync_fork.go b/services/repository/sync_fork.go
new file mode 100644
index 0000000000..ebcac76136
--- /dev/null
+++ b/services/repository/sync_fork.go
@@ -0,0 +1,113 @@
+// Copyright 2025 The Forgejo Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package repository
+
+import (
+ "context"
+ "fmt"
+ "slices"
+
+ git_model "forgejo.org/models/git"
+ repo_model "forgejo.org/models/repo"
+ user_model "forgejo.org/models/user"
+ "forgejo.org/modules/git"
+ repo_module "forgejo.org/modules/repository"
+ api "forgejo.org/modules/structs"
+)
+
+// SyncFork syncs a branch of a fork with the base repo
+func SyncFork(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, branch string) error {
+ err := repo.MustNotBeArchived()
+ if err != nil {
+ return err
+ }
+
+ err = repo.GetBaseRepo(ctx)
+ if err != nil {
+ return err
+ }
+
+ err = git.Push(ctx, repo.BaseRepo.RepoPath(), git.PushOptions{
+ Remote: repo.RepoPath(),
+ Branch: fmt.Sprintf("%s:%s", branch, branch),
+ Env: repo_module.PushingEnvironment(doer, repo),
+ })
+
+ return err
+}
+
+// GetSyncForkInfo returns information about syncing a fork
+func GetSyncForkInfo(ctx context.Context, repo *repo_model.Repository, branch string) (*api.SyncForkInfo, error) {
+ info := new(api.SyncForkInfo)
+
+ if !repo.IsFork {
+ return info, nil
+ }
+
+ if repo.IsArchived {
+ return info, nil
+ }
+
+ err := repo.GetBaseRepo(ctx)
+ if err != nil {
+ return nil, err
+ }
+
+ forkBranch, err := git_model.GetBranch(ctx, repo.ID, branch)
+ if err != nil {
+ return nil, err
+ }
+
+ info.ForkCommit = forkBranch.CommitID
+
+ baseBranch, err := git_model.GetBranch(ctx, repo.BaseRepo.ID, branch)
+ if err != nil {
+ if git_model.IsErrBranchNotExist(err) {
+ // If the base repo doesn't have the branch, we don't need to continue
+ return info, nil
+ }
+ return nil, err
+ }
+
+ info.BaseCommit = baseBranch.CommitID
+
+ // If both branches have the same latest commit, we don't need to sync
+ if forkBranch.CommitID == baseBranch.CommitID {
+ return info, nil
+ }
+
+ // Check if the latest commit of the fork is also in the base
+ gitRepo, err := git.OpenRepository(ctx, repo.BaseRepo.RepoPath())
+ if err != nil {
+ return nil, err
+ }
+ defer gitRepo.Close()
+
+ commit, err := gitRepo.GetCommit(forkBranch.CommitID)
+ if err != nil {
+ if git.IsErrNotExist(err) {
+ return info, nil
+ }
+ return nil, err
+ }
+
+ branchList, err := commit.GetAllBranches()
+ if err != nil {
+ return nil, err
+ }
+
+ if !slices.Contains(branchList, branch) {
+ return info, nil
+ }
+
+ diff, err := git.GetDivergingCommits(ctx, repo.BaseRepo.RepoPath(), baseBranch.CommitID, forkBranch.CommitID, nil)
+ if err != nil {
+ return nil, err
+ }
+
+ info.Allowed = true
+ info.CommitsBehind = diff.Behind
+
+ return info, nil
+}
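
A minimal sketch of how the two new helpers are meant to be combined; the wrapper below is hypothetical (not part of the patch) and only shows the intended call order:

    package repository // sketch only, not part of the patch

    import (
        "context"

        repo_model "forgejo.org/models/repo"
        user_model "forgejo.org/models/user"
    )

    // syncForkIfPossible asks GetSyncForkInfo first and only pushes when the
    // fork branch can be fast-forwarded to the base branch.
    func syncForkIfPossible(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, branch string) error {
        info, err := GetSyncForkInfo(ctx, repo, branch)
        if err != nil {
            return err
        }
        if !info.Allowed {
            // not a fork, archived, branch missing in the base repo, or diverged
            return nil
        }
        return SyncFork(ctx, doer, repo, branch)
    }
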
diff --git a/services/shared/automerge/automerge.go b/services/shared/automerge/automerge.go
index 1dc309f4b3..be7b2f6eb4 100644
--- a/services/shared/automerge/automerge.go
+++ b/services/shared/automerge/automerge.go
@@ -21,9 +21,9 @@ import (
var PRAutoMergeQueue *queue.WorkerPoolQueue[string]
func addToQueue(pr *issues_model.PullRequest, sha string) {
- log.Trace("Adding pullID: %d to the pull requests patch checking queue with sha %s", pr.ID, sha)
+ log.Trace("Adding pullID: %d to the automerge queue with sha %s", pr.ID, sha)
if err := PRAutoMergeQueue.Push(fmt.Sprintf("%d_%s", pr.ID, sha)); err != nil {
- log.Error("Error adding pullID: %d to the pull requests patch checking queue %v", pr.ID, err)
+ log.Error("Error adding pullID: %d to the automerge queue %v", pr.ID, err)
}
}
@@ -43,32 +43,29 @@ func StartPRCheckAndAutoMergeBySHA(ctx context.Context, sha string, repo *repo_m
return nil
}
-// StartPRCheckAndAutoMerge start an automerge check and auto merge task for a pull request
func StartPRCheckAndAutoMerge(ctx context.Context, pull *issues_model.PullRequest) {
if pull == nil || pull.HasMerged || !pull.CanAutoMerge() {
return
}
- if err := pull.LoadBaseRepo(ctx); err != nil {
- log.Error("LoadBaseRepo: %v", err)
- return
+ commitID := pull.HeadCommitID
+ if commitID == "" {
+ commitID = getCommitIDFromRefName(ctx, pull)
}
- gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo)
- if err != nil {
- log.Error("OpenRepository: %v", err)
- return
- }
- defer gitRepo.Close()
- commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
- if err != nil {
- log.Error("GetRefCommitID: %v", err)
+ if commitID == "" {
return
}
addToQueue(pull, commitID)
}
+var AddToQueueIfMergeable = func(ctx context.Context, pull *issues_model.PullRequest) {
+ if pull.Status == issues_model.PullRequestStatusMergeable {
+ StartPRCheckAndAutoMerge(ctx, pull)
+ }
+}
+
func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.Repository, filter func(*issues_model.PullRequest) bool) (map[int64]*issues_model.PullRequest, error) {
gitRepo, err := gitrepo.OpenRepository(ctx, repo)
if err != nil {
@@ -118,3 +115,24 @@ func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model.
return pulls, nil
}
+
+func getCommitIDFromRefName(ctx context.Context, pull *issues_model.PullRequest) string {
+ if err := pull.LoadBaseRepo(ctx); err != nil {
+ log.Error("LoadBaseRepo: %v", err)
+ return ""
+ }
+
+ gitRepo, err := gitrepo.OpenRepository(ctx, pull.BaseRepo)
+ if err != nil {
+ log.Error("OpenRepository: %v", err)
+ return ""
+ }
+ defer gitRepo.Close()
+ commitID, err := gitRepo.GetRefCommitID(pull.GetGitRefName())
+ if err != nil {
+ log.Error("GetRefCommitID: %v", err)
+ return ""
+ }
+
+ return commitID
+}
diff --git a/services/task/task.go b/services/task/task.go
index 3181fc79d7..f030bdb38c 100644
--- a/services/task/task.go
+++ b/services/task/task.go
@@ -5,6 +5,7 @@ package task
import (
"context"
+ "errors"
"fmt"
admin_model "forgejo.org/models/admin"
@@ -41,7 +42,7 @@ func Run(ctx context.Context, t *admin_model.Task) error {
func Init() error {
taskQueue = queue.CreateSimpleQueue(graceful.GetManager().ShutdownContext(), "task", handler)
if taskQueue == nil {
- return fmt.Errorf("unable to create task queue")
+ return errors.New("unable to create task queue")
}
go graceful.GetManager().RunWithCancel(taskQueue)
return nil
diff --git a/services/user/avatar_test.go b/services/user/avatar_test.go
index b208efeb6f..17132a74ab 100644
--- a/services/user/avatar_test.go
+++ b/services/user/avatar_test.go
@@ -42,7 +42,7 @@ func TestUserDeleteAvatar(t *testing.T) {
err := UploadAvatar(db.DefaultContext, user, buff.Bytes())
require.NoError(t, err)
verification := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
- assert.NotEqual(t, "", verification.Avatar)
+ assert.NotEmpty(t, verification.Avatar)
// fail to delete ...
storage.Avatars = storage.UninitializedStorage
@@ -60,7 +60,7 @@ func TestUserDeleteAvatar(t *testing.T) {
// ... the avatar is removed from the database
verification = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
- assert.Equal(t, "", verification.Avatar)
+ assert.Empty(t, verification.Avatar)
})
t.Run("Success", func(t *testing.T) {
@@ -70,12 +70,12 @@ func TestUserDeleteAvatar(t *testing.T) {
err := UploadAvatar(db.DefaultContext, user, buff.Bytes())
require.NoError(t, err)
verification := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
- assert.NotEqual(t, "", verification.Avatar)
+ assert.NotEmpty(t, verification.Avatar)
err = DeleteAvatar(db.DefaultContext, user)
require.NoError(t, err)
verification = unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
- assert.Equal(t, "", verification.Avatar)
+ assert.Empty(t, verification.Avatar)
})
}
diff --git a/services/user/delete.go b/services/user/delete.go
index 9ce917cd27..9caa24c373 100644
--- a/services/user/delete.go
+++ b/services/user/delete.go
@@ -1,4 +1,5 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package user
@@ -216,6 +217,11 @@ func deleteUser(ctx context.Context, u *user_model.User, purge bool) (err error)
}
// ***** END: ExternalLoginUser *****
+ // If the user was reported as abusive, a shadow copy should be created before deletion.
+ if err = user_model.IfNeededCreateShadowCopyForUser(ctx, u); err != nil {
+ return err
+ }
+
if _, err = db.DeleteByID[user_model.User](ctx, u.ID); err != nil {
return fmt.Errorf("delete: %w", err)
}
diff --git a/services/user/email.go b/services/user/email.go
index f49efde1be..7a01fa77b3 100644
--- a/services/user/email.go
+++ b/services/user/email.go
@@ -1,4 +1,5 @@
// Copyright 2024 The Gitea Authors. All rights reserved.
+// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package user
@@ -203,6 +204,11 @@ func MakeEmailAddressPrimary(ctx context.Context, u *user_model.User, newPrimary
oldPrimaryEmail := u.Email
+ // If the user was reported as abusive, a shadow copy should be created before first update (of certain columns).
+ if err = user_model.IfNeededCreateShadowCopyForUser(ctx, u, "email"); err != nil {
+ return err
+ }
+
// 1. Update user table
u.Email = newPrimaryEmail.Email
if _, err = sess.ID(u.ID).Cols("email").Update(u); err != nil {
diff --git a/services/user/user_test.go b/services/user/user_test.go
index 64a9ed2d23..4678d3bc9a 100644
--- a/services/user/user_test.go
+++ b/services/user/user_test.go
@@ -189,7 +189,7 @@ func TestRenameUser(t *testing.T) {
redirectUID, err := user_model.LookupUserRedirect(db.DefaultContext, oldUsername)
require.NoError(t, err)
- assert.EqualValues(t, user.ID, redirectUID)
+ assert.Equal(t, user.ID, redirectUID)
unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{OwnerID: user.ID, OwnerName: user.Name})
})
@@ -205,13 +205,13 @@ func TestRenameUser(t *testing.T) {
unittest.AssertExistsIf(t, true, &user_model.Redirect{LowerName: "user_rename"})
// The granularity of created_unix is a second.
- time.Sleep(time.Second)
+ test.SleepTillNextSecond()
require.NoError(t, RenameUser(db.DefaultContext, user, "redirect-2"))
unittest.AssertExistsIf(t, false, &user_model.Redirect{LowerName: "user_rename"})
unittest.AssertExistsIf(t, true, &user_model.Redirect{LowerName: "redirect-1"})
setting.Service.MaxUserRedirects = 2
- time.Sleep(time.Second)
+ test.SleepTillNextSecond()
require.NoError(t, RenameUser(db.DefaultContext, user, "redirect-3"))
unittest.AssertExistsIf(t, true, &user_model.Redirect{LowerName: "redirect-1"})
unittest.AssertExistsIf(t, true, &user_model.Redirect{LowerName: "redirect-2"})
diff --git a/services/webhook/default.go b/services/webhook/default.go
index 30717a7352..797b98f99a 100644
--- a/services/webhook/default.go
+++ b/services/webhook/default.go
@@ -36,8 +36,7 @@ func (dh defaultHandler) Type() webhook_module.HookType {
func (dh defaultHandler) Icon(size int) template.HTML {
if dh.forgejo {
- // forgejo.svg is not in web_src/svg/, so svg.RenderHTML does not work
- return shared.ImgIcon("forgejo.svg", size)
+ return svg.RenderHTML("gitea-forgejo", size, "img")
}
return svg.RenderHTML("gitea-gitea", size, "img")
}
diff --git a/services/webhook/default_test.go b/services/webhook/default_test.go
index f946870d57..fcef4612e1 100644
--- a/services/webhook/default_test.go
+++ b/services/webhook/default_test.go
@@ -237,7 +237,7 @@ func TestOpenProjectPayload(t *testing.T) {
assert.Equal(t, 12, j.Get("number").MustBeValid().ToInt())
assert.Equal(t, "http://localhost:3000/test/repo/pulls/12", j.Get("html_url").MustBeValid().ToString())
assert.Equal(t, jsoniter.NilValue, j.Get("updated_at").ValueType())
- assert.Equal(t, "", j.Get("state").MustBeValid().ToString())
+ assert.Empty(t, j.Get("state").MustBeValid().ToString())
assert.Equal(t, "Fix bug", j.Get("title").MustBeValid().ToString())
assert.Equal(t, "fixes bug #2", j.Get("body").MustBeValid().ToString())
diff --git a/services/webhook/deliver.go b/services/webhook/deliver.go
index 0c7c039f10..23aca80345 100644
--- a/services/webhook/deliver.go
+++ b/services/webhook/deliver.go
@@ -6,6 +6,7 @@ package webhook
import (
"context"
"crypto/tls"
+ "errors"
"fmt"
"io"
"net/http"
@@ -218,7 +219,7 @@ func Init() error {
hookQueue = queue.CreateUniqueQueue(graceful.GetManager().ShutdownContext(), "webhook_sender", handler)
if hookQueue == nil {
- return fmt.Errorf("unable to create webhook_sender queue")
+ return errors.New("unable to create webhook_sender queue")
}
go graceful.GetManager().RunWithCancel(hookQueue)
diff --git a/services/webhook/deliver_test.go b/services/webhook/deliver_test.go
index 4dd898e60a..1a9ce05de4 100644
--- a/services/webhook/deliver_test.go
+++ b/services/webhook/deliver_test.go
@@ -137,7 +137,7 @@ func TestWebhookDeliverHookTask(t *testing.T) {
case "/webhook/66d222a5d6349e1311f551e50722d837e30fce98":
// Version 1
assert.Equal(t, "push", r.Header.Get("X-GitHub-Event"))
- assert.Equal(t, "", r.Header.Get("Content-Type"))
+ assert.Empty(t, r.Header.Get("Content-Type"))
body, err := io.ReadAll(r.Body)
require.NoError(t, err)
assert.Equal(t, `{"data": 42}`, string(body))
diff --git a/services/webhook/dingtalk.go b/services/webhook/dingtalk.go
index 9d5c7e573f..ec53c79c2c 100644
--- a/services/webhook/dingtalk.go
+++ b/services/webhook/dingtalk.go
@@ -207,6 +207,12 @@ func (dc dingtalkConvertor) Package(p *api.PackagePayload) (DingtalkPayload, err
return createDingtalkPayload(text, text, "view package", p.Package.HTMLURL), nil
}
+func (dc dingtalkConvertor) Action(p *api.ActionPayload) (DingtalkPayload, error) {
+ text, _ := getActionPayloadInfo(p, noneLinkFormatter)
+
+ return createDingtalkPayload(text, text, "view action", p.Run.HTMLURL), nil
+}
+
func createDingtalkPayload(title, text, singleTitle, singleURL string) DingtalkPayload {
return DingtalkPayload{
MsgType: "actionCard",
diff --git a/services/webhook/discord.go b/services/webhook/discord.go
index 3970a2552d..7259c4a995 100644
--- a/services/webhook/discord.go
+++ b/services/webhook/discord.go
@@ -16,6 +16,7 @@ import (
"unicode/utf8"
webhook_model "forgejo.org/models/webhook"
+ "forgejo.org/modules/base"
"forgejo.org/modules/git"
"forgejo.org/modules/json"
"forgejo.org/modules/log"
@@ -151,6 +152,18 @@ var (
redColor = color("ff3232")
)
+// https://discord.com/developers/docs/resources/message#embed-object-embed-limits
+// Discord has some limits in place for the embeds.
+// According to some tests, there is no consistent limit for different character sets.
+// For example: 4096 ASCII letters are allowed, but only 2490 emoji characters are allowed.
+// To keep it simple, we currently truncate at 2000.
+const discordDescriptionCharactersLimit = 2000
+
+type discordConvertor struct {
+ Username string
+ AvatarURL string
+}
+
// Create implements PayloadConvertor Create method
func (d discordConvertor) Create(p *api.CreatePayload) (DiscordPayload, error) {
// created tag/branch
@@ -312,9 +325,10 @@ func (d discordConvertor) Package(p *api.PackagePayload) (DiscordPayload, error)
return d.createPayload(p.Sender, text, "", p.Package.HTMLURL, color), nil
}
-type discordConvertor struct {
- Username string
- AvatarURL string
+func (d discordConvertor) Action(p *api.ActionPayload) (DiscordPayload, error) {
+ text, color := getActionPayloadInfo(p, noneLinkFormatter)
+
+ return d.createPayload(p.Run.TriggerUser, text, "", p.Run.HTMLURL, color), nil
}
var _ shared.PayloadConvertor[DiscordPayload] = discordConvertor{}
@@ -336,7 +350,7 @@ func parseHookPullRequestEventType(event webhook_module.HookEventType) (string,
case webhook_module.HookEventPullRequestReviewApproved:
return "approved", nil
case webhook_module.HookEventPullRequestReviewRejected:
- return "rejected", nil
+ return "requested changes", nil
case webhook_module.HookEventPullRequestReviewComment:
return "comment", nil
default:
@@ -357,7 +371,7 @@ func (d discordConvertor) createPayload(s *api.User, title, text, url string, co
Embeds: []DiscordEmbed{
{
Title: title,
- Description: text,
+ Description: base.TruncateString(text, discordDescriptionCharactersLimit),
URL: url,
Color: color,
Author: DiscordEmbedAuthor{
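
To illustrate the new cap, using the same helper the hunk calls (the input string here is made up; with plain ASCII the truncated description comes out at exactly 2000 characters, which is what the updated test below asserts):

    package main

    import (
        "fmt"
        "strings"

        "forgejo.org/modules/base"
    )

    const discordDescriptionCharactersLimit = 2000 // mirrors the constant above

    func main() {
        description := strings.Repeat("a", 4096) // would previously have been sent as-is
        truncated := base.TruncateString(description, discordDescriptionCharactersLimit)
        fmt.Println(len(truncated)) // 2000
    }
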
diff --git a/services/webhook/discord_test.go b/services/webhook/discord_test.go
index ce3aaa10cf..b04be30bc6 100644
--- a/services/webhook/discord_test.go
+++ b/services/webhook/discord_test.go
@@ -175,7 +175,7 @@ func TestDiscordPayload(t *testing.T) {
require.NoError(t, err)
assert.Len(t, pl.Embeds, 1)
- assert.Len(t, pl.Embeds[0].Description, 4096)
+ assert.Len(t, pl.Embeds[0].Description, 2000)
})
t.Run("IssueComment", func(t *testing.T) {
diff --git a/services/webhook/feishu.go b/services/webhook/feishu.go
index 01b3d07983..57f2362783 100644
--- a/services/webhook/feishu.go
+++ b/services/webhook/feishu.go
@@ -191,6 +191,12 @@ func (fc feishuConvertor) Package(p *api.PackagePayload) (FeishuPayload, error)
return newFeishuTextPayload(text), nil
}
+func (fc feishuConvertor) Action(p *api.ActionPayload) (FeishuPayload, error) {
+ text, _ := getActionPayloadInfo(p, noneLinkFormatter)
+
+ return newFeishuTextPayload(text), nil
+}
+
type feishuConvertor struct{}
var _ shared.PayloadConvertor[FeishuPayload] = feishuConvertor{}
diff --git a/services/webhook/general.go b/services/webhook/general.go
index 40a2467177..c728b6ba1a 100644
--- a/services/webhook/general.go
+++ b/services/webhook/general.go
@@ -37,11 +37,12 @@ func getPullRequestInfo(p *api.PullRequestPayload) (title, link, by, operator, o
for i, user := range assignList {
assignStringList[i] = user.UserName
}
- if p.Action == api.HookIssueAssigned {
+ switch p.Action {
+ case api.HookIssueAssigned:
operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
- } else if p.Action == api.HookIssueUnassigned {
+ case api.HookIssueUnassigned:
operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName)
- } else if p.Action == api.HookIssueMilestoned {
+ case api.HookIssueMilestoned:
operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.PullRequest.Milestone.ID)
}
link = p.PullRequest.HTMLURL
@@ -62,11 +63,12 @@ func getIssuesInfo(p *api.IssuePayload) (issueTitle, link, by, operator, operate
for i, user := range assignList {
assignStringList[i] = user.UserName
}
- if p.Action == api.HookIssueAssigned {
+ switch p.Action {
+ case api.HookIssueAssigned:
operateResult = fmt.Sprintf("%s assign this to %s", p.Sender.UserName, assignList[len(assignList)-1].UserName)
- } else if p.Action == api.HookIssueUnassigned {
+ case api.HookIssueUnassigned:
operateResult = fmt.Sprintf("%s unassigned this for someone", p.Sender.UserName)
- } else if p.Action == api.HookIssueMilestoned {
+ case api.HookIssueMilestoned:
operateResult = fmt.Sprintf("%s/milestone/%d", p.Repository.HTMLURL, p.Issue.Milestone.ID)
}
link = p.Issue.HTMLURL
@@ -302,6 +304,25 @@ func getPackagePayloadInfo(p *api.PackagePayload, linkFormatter linkFormatter, w
return text, color
}
+func getActionPayloadInfo(p *api.ActionPayload, linkFormatter linkFormatter) (text string, color int) {
+ runLink := linkFormatter(p.Run.HTMLURL, p.Run.Title)
+ repoLink := linkFormatter(p.Run.Repo.HTMLURL, p.Run.Repo.FullName)
+
+ switch p.Action {
+ case api.HookActionFailure:
+ text = fmt.Sprintf("%s Action Failed in %s %s", runLink, repoLink, p.Run.PrettyRef)
+ color = redColor
+ case api.HookActionRecover:
+ text = fmt.Sprintf("%s Action Recovered in %s %s", runLink, repoLink, p.Run.PrettyRef)
+ color = greenColor
+ case api.HookActionSuccess:
+ text = fmt.Sprintf("%s Action Succeeded in %s %s", runLink, repoLink, p.Run.PrettyRef)
+ color = greenColor
+ }
+
+ return text, color
+}
+
// ToHook convert models.Webhook to api.Hook
// This function is not part of the convert package to prevent an import cycle
func ToHook(repoLink string, w *webhook_model.Webhook) (*api.Hook, error) {
diff --git a/services/webhook/general_test.go b/services/webhook/general_test.go
index b321fb3f8c..10c779742d 100644
--- a/services/webhook/general_test.go
+++ b/services/webhook/general_test.go
@@ -270,6 +270,22 @@ func pullReleaseTestPayload() *api.ReleasePayload {
}
}
+func ActionTestPayload() *api.ActionPayload {
+ // this is not a complete action payload but enough for testing purposes
+ return &api.ActionPayload{
+ Run: &api.ActionRun{
+ Repo: &api.Repository{
+ HTMLURL: "http://localhost:3000/test/repo",
+ Name: "repo",
+ FullName: "test/repo",
+ },
+ PrettyRef: "main",
+ HTMLURL: "http://localhost:3000/test/repo/actions/runs/69",
+ Title: "Build release",
+ },
+ }
+}
+
func pullRequestTestPayload() *api.PullRequestPayload {
return &api.PullRequestPayload{
Action: api.HookIssueOpened,
@@ -675,3 +691,36 @@ func TestGetIssueCommentPayloadInfo(t *testing.T) {
assert.Equal(t, c.color, color, "case %d", i)
}
}
+
+func TestGetActionPayloadInfo(t *testing.T) {
+ p := ActionTestPayload()
+
+ cases := []struct {
+ action api.HookActionAction
+ text string
+ color int
+ }{
+ {
+ api.HookActionFailure,
+ "Build release Action Failed in test/repo main",
+ redColor,
+ },
+ {
+ api.HookActionSuccess,
+ "Build release Action Succeeded in test/repo main",
+ greenColor,
+ },
+ {
+ api.HookActionRecover,
+ "Build release Action Recovered in test/repo main",
+ greenColor,
+ },
+ }
+
+ for i, c := range cases {
+ p.Action = c.action
+ text, color := getActionPayloadInfo(p, noneLinkFormatter)
+ assert.Equal(t, c.text, text, "case %d", i)
+ assert.Equal(t, c.color, color, "case %d", i)
+ }
+}
diff --git a/services/webhook/matrix.go b/services/webhook/matrix.go
index f1cc9384d3..bdb0c292ab 100644
--- a/services/webhook/matrix.go
+++ b/services/webhook/matrix.go
@@ -273,6 +273,12 @@ func (m matrixConvertor) Package(p *api.PackagePayload) (MatrixPayload, error) {
return m.newPayload(text)
}
+func (m matrixConvertor) Action(p *api.ActionPayload) (MatrixPayload, error) {
+ text, _ := getActionPayloadInfo(p, htmlLinkFormatter)
+
+ return m.newPayload(text)
+}
+
var urlRegex = regexp.MustCompile(`<a [^>]*?href="([^">]*?)">(.*?)</a>`)
func getMessageBody(htmlText string) string {
diff --git a/services/webhook/msteams.go b/services/webhook/msteams.go
index 1ed03afd26..3b35c407e1 100644
--- a/services/webhook/msteams.go
+++ b/services/webhook/msteams.go
@@ -326,6 +326,23 @@ func (m msteamsConvertor) Package(p *api.PackagePayload) (MSTeamsPayload, error)
), nil
}
+func (m msteamsConvertor) Action(p *api.ActionPayload) (MSTeamsPayload, error) {
+ title, color := getActionPayloadInfo(p, noneLinkFormatter)
+
+ // TODO: is TriggerUser correct here?
+ // if you'd like to test these proprietary services, see the discussion on: https://codeberg.org/forgejo/forgejo/pulls/7508
+ return createMSTeamsPayload(
+ p.Run.Repo,
+ p.Run.TriggerUser,
+ title,
+ "",
+ p.Run.HTMLURL,
+ color,
+ // TODO: does this make any sense?
+ &MSTeamsFact{"Action:", p.Run.Title},
+ ), nil
+}
+
func createMSTeamsPayload(r *api.Repository, s *api.User, title, text, actionTarget string, color int, fact *MSTeamsFact) MSTeamsPayload {
facts := make([]MSTeamsFact, 0, 2)
if r != nil {
diff --git a/services/webhook/msteams_test.go b/services/webhook/msteams_test.go
index b210f299bc..da6439f198 100644
--- a/services/webhook/msteams_test.go
+++ b/services/webhook/msteams_test.go
@@ -335,7 +335,7 @@ func TestMSTeamsPayload(t *testing.T) {
assert.Equal(t, "[test/repo] New wiki page 'index' (Wiki change comment)", pl.Summary)
assert.Len(t, pl.Sections, 1)
assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
- assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Empty(t, pl.Sections[0].Text)
assert.Len(t, pl.Sections[0].Facts, 2)
for _, fact := range pl.Sections[0].Facts {
if fact.Name == "Repository:" {
@@ -356,7 +356,7 @@ func TestMSTeamsPayload(t *testing.T) {
assert.Equal(t, "[test/repo] Wiki page 'index' edited (Wiki change comment)", pl.Summary)
assert.Len(t, pl.Sections, 1)
assert.Equal(t, "user1", pl.Sections[0].ActivitySubtitle)
- assert.Equal(t, "", pl.Sections[0].Text)
+ assert.Empty(t, pl.Sections[0].Text)
assert.Len(t, pl.Sections[0].Facts, 2)
for _, fact := range pl.Sections[0].Facts {
if fact.Name == "Repository:" {
diff --git a/services/webhook/notifier.go b/services/webhook/notifier.go
index e9fd52c940..009efc994f 100644
--- a/services/webhook/notifier.go
+++ b/services/webhook/notifier.go
@@ -6,6 +6,7 @@ package webhook
import (
"context"
+ actions_model "forgejo.org/models/actions"
issues_model "forgejo.org/models/issues"
packages_model "forgejo.org/models/packages"
"forgejo.org/models/perm"
@@ -887,6 +888,45 @@ func (m *webhookNotifier) PackageDelete(ctx context.Context, doer *user_model.Us
notifyPackage(ctx, doer, pd, api.HookPackageDeleted)
}
+func (m *webhookNotifier) ActionRunNowDone(ctx context.Context, run *actions_model.ActionRun, priorStatus actions_model.Status, lastRun *actions_model.ActionRun) {
+ source := EventSource{
+ Repository: run.Repo,
+ Owner: run.TriggerUser,
+ }
+
+ // The doer is the one whose perspective is used to view this ActionRun.
+ // In the best case we use the user that created the webhook.
+ // Unfortunately we don't know who that was.
+ // So instead we use the repo owner, who is able to create webhooks and allow others to do so by making them repo admins.
+ // This is pretty close to perfect.
+ doer := run.Repo.Owner
+
+ payload := &api.ActionPayload{
+ Run: convert.ToActionRun(ctx, run, doer),
+ LastRun: convert.ToActionRun(ctx, lastRun, doer),
+ PriorStatus: priorStatus.String(),
+ }
+
+ if run.Status.IsSuccess() {
+ payload.Action = api.HookActionSuccess
+ if err := PrepareWebhooks(ctx, source, webhook_module.HookEventActionRunSuccess, payload); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+ // send another event when this is a recover
+ if lastRun != nil && !lastRun.Status.IsSuccess() {
+ payload.Action = api.HookActionRecover
+ if err := PrepareWebhooks(ctx, source, webhook_module.HookEventActionRunRecover, payload); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+ }
+ } else {
+ payload.Action = api.HookActionFailure
+ if err := PrepareWebhooks(ctx, source, webhook_module.HookEventActionRunFailure, payload); err != nil {
+ log.Error("PrepareWebhooks: %v", err)
+ }
+ }
+}
+
func notifyPackage(ctx context.Context, sender *user_model.User, pd *packages_model.PackageDescriptor, action api.HookPackageAction) {
source := EventSource{
Repository: pd.Repository,
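
On the receiving side, all three new event types deliver the same ActionPayload. A small consumer sketch (the handler function and its wiring are assumptions; the payload fields are the ones populated above):

    package main

    import (
        "encoding/json"
        "fmt"

        api "forgejo.org/modules/structs"
    )

    // handleActionDelivery decodes an action_run_* webhook body and reacts to it.
    func handleActionDelivery(body []byte) error {
        var p api.ActionPayload
        if err := json.Unmarshal(body, &p); err != nil {
            return err
        }
        switch p.Action {
        case api.HookActionRecover:
            fmt.Printf("%s is green again (was %s)\n", p.Run.Title, p.PriorStatus)
        case api.HookActionFailure:
            fmt.Printf("%s failed on %s: %s\n", p.Run.Title, p.Run.PrettyRef, p.Run.HTMLURL)
        case api.HookActionSuccess:
            fmt.Printf("%s succeeded on %s\n", p.Run.Title, p.Run.PrettyRef)
        }
        return nil
    }

    func main() {}
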
diff --git a/services/webhook/notifier_test.go b/services/webhook/notifier_test.go
index 46eb1f089c..a810de91c1 100644
--- a/services/webhook/notifier_test.go
+++ b/services/webhook/notifier_test.go
@@ -6,6 +6,7 @@ package webhook
import (
"testing"
+ actions_model "forgejo.org/models/actions"
"forgejo.org/models/db"
repo_model "forgejo.org/models/repo"
"forgejo.org/models/unittest"
@@ -13,10 +14,12 @@ import (
webhook_model "forgejo.org/models/webhook"
"forgejo.org/modules/git"
"forgejo.org/modules/json"
+ "forgejo.org/modules/log"
"forgejo.org/modules/repository"
"forgejo.org/modules/setting"
"forgejo.org/modules/structs"
"forgejo.org/modules/test"
+ webhook_module "forgejo.org/modules/webhook"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
@@ -83,7 +86,7 @@ func TestSyncPushCommits(t *testing.T) {
var payloadContent structs.PushPayload
require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
assert.Len(t, payloadContent.Commits, 1)
- assert.EqualValues(t, "2c54faec6c45d31c1abfaecdab471eac6633738a", payloadContent.Commits[0].ID)
+ assert.Equal(t, "2c54faec6c45d31c1abfaecdab471eac6633738a", payloadContent.Commits[0].ID)
})
}
@@ -116,6 +119,193 @@ func TestPushCommits(t *testing.T) {
var payloadContent structs.PushPayload
require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
assert.Len(t, payloadContent.Commits, 1)
- assert.EqualValues(t, "2c54faec6c45d31c1abfaecdab471eac6633738a", payloadContent.Commits[0].ID)
+ assert.Equal(t, "2c54faec6c45d31c1abfaecdab471eac6633738a", payloadContent.Commits[0].ID)
+ })
+}
+
+func assertActionEqual(t *testing.T, expectedRun *actions_model.ActionRun, actualRun *structs.ActionRun) {
+ assert.NotNil(t, expectedRun)
+ assert.NotNil(t, actualRun)
+ // only test a few things
+ assert.Equal(t, expectedRun.ID, actualRun.ID)
+ assert.Equal(t, expectedRun.Status.String(), actualRun.Status)
+ assert.Equal(t, expectedRun.Index, actualRun.Index)
+ assert.Equal(t, expectedRun.RepoID, actualRun.Repo.ID)
+ // convert to unix because of time zones
+ assert.Equal(t, expectedRun.Stopped.AsTime().Unix(), actualRun.Stopped.Unix())
+ assert.Equal(t, expectedRun.Title, actualRun.Title)
+ assert.Equal(t, expectedRun.WorkflowID, actualRun.WorkflowID)
+}
+
+func TestAction(t *testing.T) {
+ defer unittest.OverrideFixtures("services/webhook/TestPushCommits")()
+ require.NoError(t, unittest.PrepareTestDatabase())
+
+ triggerUser := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+ repo := unittest.AssertExistsAndLoadBean(t, &repo_model.Repository{ID: 2, OwnerID: triggerUser.ID})
+
+ oldSuccessRun := &actions_model.ActionRun{
+ ID: 1,
+ Status: actions_model.StatusSuccess,
+ Index: 1,
+ RepoID: repo.ID,
+ Stopped: 1693648027,
+ WorkflowID: "some_workflow",
+ Title: "oldSuccessRun",
+ TriggerUser: triggerUser,
+ TriggerUserID: triggerUser.ID,
+ TriggerEvent: "push",
+ }
+ oldSuccessRun.LoadAttributes(db.DefaultContext)
+ oldFailureRun := &actions_model.ActionRun{
+ ID: 1,
+ Status: actions_model.StatusFailure,
+ Index: 1,
+ RepoID: repo.ID,
+ Stopped: 1693648027,
+ WorkflowID: "some_workflow",
+ Title: "oldFailureRun",
+ TriggerUser: triggerUser,
+ TriggerUserID: triggerUser.ID,
+ TriggerEvent: "push",
+ }
+ oldFailureRun.LoadAttributes(db.DefaultContext)
+ newSuccessRun := &actions_model.ActionRun{
+ ID: 1,
+ Status: actions_model.StatusSuccess,
+ Index: 1,
+ RepoID: repo.ID,
+ Stopped: 1693648327,
+ WorkflowID: "some_workflow",
+ Title: "newSuccessRun",
+ TriggerUser: triggerUser,
+ TriggerUserID: triggerUser.ID,
+ TriggerEvent: "push",
+ }
+ newSuccessRun.LoadAttributes(db.DefaultContext)
+ newFailureRun := &actions_model.ActionRun{
+ ID: 1,
+ Status: actions_model.StatusFailure,
+ Index: 1,
+ RepoID: repo.ID,
+ Stopped: 1693648327,
+ WorkflowID: "some_workflow",
+ Title: "newFailureRun",
+ TriggerUser: triggerUser,
+ TriggerUserID: triggerUser.ID,
+ TriggerEvent: "push",
+ }
+ newFailureRun.LoadAttributes(db.DefaultContext)
+
+ t.Run("Successful Run after Nothing", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newSuccessRun, actions_model.StatusWaiting, nil)
+
+ // there's only one of these at the time
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_success' AND payload_content LIKE '%success%newSuccessRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunSuccess, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionSuccess, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newSuccessRun, payloadContent.Run)
+ assert.Nil(t, payloadContent.LastRun)
+ })
+
+ t.Run("Successful Run after Failure", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newSuccessRun, actions_model.StatusWaiting, oldFailureRun)
+
+ {
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_success' AND payload_content LIKE '%success%newSuccessRun%oldFailureRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunSuccess, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionSuccess, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newSuccessRun, payloadContent.Run)
+ assertActionEqual(t, oldFailureRun, payloadContent.LastRun)
+ }
+ {
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_recover' AND payload_content LIKE '%recover%newSuccessRun%oldFailureRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunRecover, hookTask.EventType)
+
+ log.Error("something: %s", hookTask.PayloadContent)
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionRecover, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newSuccessRun, payloadContent.Run)
+ assertActionEqual(t, oldFailureRun, payloadContent.LastRun)
+ }
+ })
+
+ t.Run("Successful Run after Success", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newSuccessRun, actions_model.StatusWaiting, oldSuccessRun)
+
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_success' AND payload_content LIKE '%success%newSuccessRun%oldSuccessRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunSuccess, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionSuccess, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newSuccessRun, payloadContent.Run)
+ assertActionEqual(t, oldSuccessRun, payloadContent.LastRun)
+ })
+
+ t.Run("Failed Run after Nothing", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newFailureRun, actions_model.StatusWaiting, nil)
+
+ // there should only be this one at the time
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_failure' AND payload_content LIKE '%failure%newFailureRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunFailure, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionFailure, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newFailureRun, payloadContent.Run)
+ assert.Nil(t, payloadContent.LastRun)
+ })
+
+ t.Run("Failed Run after Failure", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newFailureRun, actions_model.StatusWaiting, oldFailureRun)
+
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_failure' AND payload_content LIKE '%failure%newFailureRun%oldFailureRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunFailure, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionFailure, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newFailureRun, payloadContent.Run)
+ assertActionEqual(t, oldFailureRun, payloadContent.LastRun)
+ })
+
+ t.Run("Failed Run after Success", func(t *testing.T) {
+ defer test.MockVariableValue(&setting.Webhook.PayloadCommitLimit, 10)()
+
+ NewNotifier().ActionRunNowDone(db.DefaultContext, newFailureRun, actions_model.StatusWaiting, oldSuccessRun)
+
+ hookTask := unittest.AssertExistsAndLoadBean(t, &webhook_model.HookTask{}, unittest.Cond("event_type == 'action_run_failure' AND payload_content LIKE '%failure%newFailureRun%oldSuccessRun%'"))
+ assert.Equal(t, webhook_module.HookEventActionRunFailure, hookTask.EventType)
+
+ var payloadContent structs.ActionPayload
+ require.NoError(t, json.Unmarshal([]byte(hookTask.PayloadContent), &payloadContent))
+ assert.Equal(t, structs.HookActionFailure, payloadContent.Action)
+ assert.Equal(t, actions_model.StatusWaiting.String(), payloadContent.PriorStatus)
+ assertActionEqual(t, newFailureRun, payloadContent.Run)
+ assertActionEqual(t, oldSuccessRun, payloadContent.LastRun)
})
}
diff --git a/services/webhook/shared/payloader.go b/services/webhook/shared/payloader.go
index 0a6535eddb..e3be4c4b4c 100644
--- a/services/webhook/shared/payloader.go
+++ b/services/webhook/shared/payloader.go
@@ -36,6 +36,7 @@ type PayloadConvertor[T any] interface {
Release(*api.ReleasePayload) (T, error)
Wiki(*api.WikiPayload) (T, error)
Package(*api.PackagePayload) (T, error)
+ Action(*api.ActionPayload) (T, error)
}
func convertUnmarshalledJSON[T, P any](convert func(P) (T, error), data []byte) (T, error) {
@@ -86,6 +87,8 @@ func NewPayload[T any](rc PayloadConvertor[T], data []byte, event webhook_module
return convertUnmarshalledJSON(rc.Wiki, data)
case webhook_module.HookEventPackage:
return convertUnmarshalledJSON(rc.Package, data)
+ case webhook_module.HookEventActionRunFailure, webhook_module.HookEventActionRunRecover, webhook_module.HookEventActionRunSuccess:
+ return convertUnmarshalledJSON(rc.Action, data)
}
var t T
return t, fmt.Errorf("newPayload unsupported event: %s", event)
diff --git a/services/webhook/slack.go b/services/webhook/slack.go
index e854f89c6c..8c61e7ba25 100644
--- a/services/webhook/slack.go
+++ b/services/webhook/slack.go
@@ -142,6 +142,7 @@ func SlackLinkToRef(repoURL, ref string) string {
return SlackLinkFormatter(url, refName)
}
+// TODO: fix spelling to Converter
// Create implements payloadConvertor Create method
func (s slackConvertor) Create(p *api.CreatePayload) (SlackPayload, error) {
refLink := SlackLinkToRef(p.Repo.HTMLURL, p.Ref)
@@ -311,6 +312,12 @@ func (s slackConvertor) Repository(p *api.RepositoryPayload) (SlackPayload, erro
return s.createPayload(text, nil), nil
}
+func (s slackConvertor) Action(p *api.ActionPayload) (SlackPayload, error) {
+ text, _ := getActionPayloadInfo(p, SlackLinkFormatter)
+
+ return s.createPayload(text, nil), nil
+}
+
func (s slackConvertor) createPayload(text string, attachments []SlackAttachment) SlackPayload {
return SlackPayload{
Channel: s.Channel,
diff --git a/services/webhook/sourcehut/builds.go b/services/webhook/sourcehut/builds.go
index bd3eeebc6c..2593afb0b2 100644
--- a/services/webhook/sourcehut/builds.go
+++ b/services/webhook/sourcehut/builds.go
@@ -190,6 +190,10 @@ func (pc sourcehutConvertor) Package(_ *api.PackagePayload) (graphqlPayload[buil
return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
}
+func (pc sourcehutConvertor) Action(_ *api.ActionPayload) (graphqlPayload[buildsVariables], error) {
+ return graphqlPayload[buildsVariables]{}, shared.ErrPayloadTypeNotSupported
+}
+
// newPayload opens and adjusts the manifest to submit to the builds service
//
// in case of an error the Error field will be set, to be visible by the end-user under recent deliveries
diff --git a/services/webhook/telegram.go b/services/webhook/telegram.go
index d0abd667f4..e6897f68bc 100644
--- a/services/webhook/telegram.go
+++ b/services/webhook/telegram.go
@@ -205,6 +205,12 @@ func (t telegramConvertor) Package(p *api.PackagePayload) (TelegramPayload, erro
return createTelegramPayload(text), nil
}
+func (telegramConvertor) Action(p *api.ActionPayload) (TelegramPayload, error) {
+ text, _ := getActionPayloadInfo(p, htmlLinkFormatter)
+
+ return createTelegramPayload(text), nil
+}
+
func createTelegramPayload(message string) TelegramPayload {
return TelegramPayload{
Message: markup.Sanitize(strings.TrimSpace(message)),
diff --git a/services/webhook/webhook.go b/services/webhook/webhook.go
index 989b535564..ecbbfcfbd6 100644
--- a/services/webhook/webhook.go
+++ b/services/webhook/webhook.go
@@ -103,7 +103,7 @@ type EventSource struct {
Owner *user_model.User
}
-// handle delivers hook tasks
+// handler delivers hook tasks
func handler(items ...int64) []int64 {
ctx := graceful.GetManager().HammerContext()
diff --git a/services/webhook/webhook_test.go b/services/webhook/webhook_test.go
index c9af09d3e9..15cb8f620c 100644
--- a/services/webhook/webhook_test.go
+++ b/services/webhook/webhook_test.go
@@ -14,6 +14,7 @@ import (
webhook_model "forgejo.org/models/webhook"
"forgejo.org/modules/setting"
api "forgejo.org/modules/structs"
+ "forgejo.org/modules/test"
webhook_module "forgejo.org/modules/webhook"
"forgejo.org/services/convert"
@@ -104,7 +105,8 @@ func TestPrepareWebhooksBranchFilterNoMatch(t *testing.T) {
func TestWebhookUserMail(t *testing.T) {
require.NoError(t, unittest.PrepareTestDatabase())
- setting.Service.NoReplyAddress = "no-reply.com"
+ defer test.MockVariableValue(&setting.Service.NoReplyAddress, "no-reply.com")()
+
user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
assert.Equal(t, user.GetPlaceholderEmail(), convert.ToUser(db.DefaultContext, user, nil).Email)
assert.Equal(t, user.Email, convert.ToUser(db.DefaultContext, user, user).Email)
diff --git a/services/webhook/wechatwork.go b/services/webhook/wechatwork.go
index 323d23aba7..5c765b0754 100644
--- a/services/webhook/wechatwork.go
+++ b/services/webhook/wechatwork.go
@@ -201,6 +201,12 @@ func (wc wechatworkConvertor) Package(p *api.PackagePayload) (WechatworkPayload,
return newWechatworkMarkdownPayload(text), nil
}
+func (wc wechatworkConvertor) Action(p *api.ActionPayload) (WechatworkPayload, error) {
+ text, _ := getActionPayloadInfo(p, noneLinkFormatter)
+
+ return newWechatworkMarkdownPayload(text), nil
+}
+
type wechatworkConvertor struct{}
var _ shared.PayloadConvertor[WechatworkPayload] = wechatworkConvertor{}
diff --git a/services/wiki/wiki_test.go b/services/wiki/wiki_test.go
index d76104dfc7..cb984425af 100644
--- a/services/wiki/wiki_test.go
+++ b/services/wiki/wiki_test.go
@@ -26,7 +26,7 @@ func TestMain(m *testing.M) {
func TestWebPathSegments(t *testing.T) {
a := WebPathSegments("a%2Fa/b+c/d-e/f-g.-")
- assert.EqualValues(t, []string{"a/a", "b c", "d e", "f-g"}, a)
+ assert.Equal(t, []string{"a/a", "b c", "d e", "f-g"}, a)
}
func TestUserTitleToWebPath(t *testing.T) {
@@ -63,7 +63,7 @@ func TestWebPathToDisplayName(t *testing.T) {
{"a b", "a%20b.md"},
} {
_, displayName := WebPathToUserTitle(test.WebPath)
- assert.EqualValues(t, test.Expected, displayName)
+ assert.Equal(t, test.Expected, displayName)
}
}
@@ -80,7 +80,7 @@ func TestWebPathToGitPath(t *testing.T) {
{"2000-01-02-meeting.md", "2000-01-02+meeting"},
{"2000-01-02 meeting.-.md", "2000-01-02%20meeting.-"},
} {
- assert.EqualValues(t, test.Expected, WebPathToGitPath(test.WikiName))
+ assert.Equal(t, test.Expected, WebPathToGitPath(test.WikiName))
}
}
@@ -134,9 +134,9 @@ func TestUserWebGitPathConsistency(t *testing.T) {
_, userTitle1 := WebPathToUserTitle(webPath1)
gitPath1 := WebPathToGitPath(webPath1)
- assert.EqualValues(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle)
- assert.EqualValues(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle)
- assert.EqualValues(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle)
+ assert.Equal(t, userTitle, userTitle1, "UserTitle for userTitle: %q", userTitle)
+ assert.Equal(t, webPath, webPath1, "WebPath for userTitle: %q", userTitle)
+ assert.Equal(t, gitPath, gitPath1, "GitPath for userTitle: %q", userTitle)
}
}
@@ -175,7 +175,7 @@ func TestRepository_AddWikiPage(t *testing.T) {
gitPath := WebPathToGitPath(webPath)
entry, err := masterTree.GetTreeEntryByPath(gitPath)
require.NoError(t, err)
- assert.EqualValues(t, gitPath, entry.Name(), "%s not added correctly", userTitle)
+ assert.Equal(t, gitPath, entry.Name(), "%s not added correctly", userTitle)
})
}
@@ -220,7 +220,7 @@ func TestRepository_EditWikiPage(t *testing.T) {
gitPath := WebPathToGitPath(webPath)
entry, err := masterTree.GetTreeEntryByPath(gitPath)
require.NoError(t, err)
- assert.EqualValues(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName)
+ assert.Equal(t, gitPath, entry.Name(), "%s not edited correctly", newWikiName)
if newWikiName != "Home" {
_, err := masterTree.GetTreeEntryByPath("Home.md")
@@ -284,12 +284,12 @@ func TestPrepareWikiFileName(t *testing.T) {
}
if existence != tt.existence {
if existence {
- t.Errorf("expect to find no escaped file but we detect one")
+ t.Error("expect to find no escaped file but we detect one")
} else {
- t.Errorf("expect to find an escaped file but we could not detect one")
+ t.Error("expect to find an escaped file but we could not detect one")
}
}
- assert.EqualValues(t, tt.wikiPath, newWikiPath)
+ assert.Equal(t, tt.wikiPath, newWikiPath)
})
}
}
@@ -311,13 +311,13 @@ func TestPrepareWikiFileName_FirstPage(t *testing.T) {
existence, newWikiPath, err := prepareGitPath(gitRepo, "master", "Home")
assert.False(t, existence)
require.NoError(t, err)
- assert.EqualValues(t, "Home.md", newWikiPath)
+ assert.Equal(t, "Home.md", newWikiPath)
}
func TestWebPathConversion(t *testing.T) {
assert.Equal(t, "path/wiki", WebPathToURLPath(WebPath("path/wiki")))
assert.Equal(t, "wiki", WebPathToURLPath(WebPath("wiki")))
- assert.Equal(t, "", WebPathToURLPath(WebPath("")))
+ assert.Empty(t, WebPathToURLPath(WebPath("")))
}
func TestWebPathFromRequest(t *testing.T) {
diff --git a/shell.nix b/shell.nix
new file mode 100644
index 0000000000..cfd555fa37
--- /dev/null
+++ b/shell.nix
@@ -0,0 +1,28 @@
+{
+ pkgs ? import <nixpkgs> { },
+}:
+
+pkgs.mkShell {
+ name = "forgejo";
+ nativeBuildInputs = with pkgs; [
+ # generic
+ git
+ git-lfs
+ gnumake
+ gnused
+ gnutar
+ gzip
+
+ # frontend
+ nodejs
+
+ # backend
+ gofumpt
+ sqlite
+ go
+ gopls
+
+ # tests
+ openssh
+ ];
+}
diff --git a/templates/admin/config.tmpl b/templates/admin/config.tmpl
index 8f2b1c12e3..12504b8824 100644
--- a/templates/admin/config.tmpl
+++ b/templates/admin/config.tmpl
@@ -247,6 +247,16 @@