Merge plus fixing tests/e2e/declare_repos_test.go conflict
@@ -0,0 +1,22 @@
+#
+# Install the minimal version of Git supported by Forgejo
+#
+runs:
+  using: "composite"
+  steps:
+    - name: install git and git-lfs
+      run: |
+        set -x
+
+        export DEBIAN_FRONTEND=noninteractive
+
+        apt-get update -qq
+        apt-get -q install -y -qq curl ca-certificates
+
+        curl -sS -o /tmp/git-man.deb http://archive.ubuntu.com/ubuntu/pool/main/g/git/git-man_2.34.1-1ubuntu1_all.deb
+        curl -sS -o /tmp/git.deb https://archive.ubuntu.com/ubuntu/pool/main/g/git/git_2.34.1-1ubuntu1_amd64.deb
+        curl -sS -o /tmp/git-lfs.deb https://archive.ubuntu.com/ubuntu/pool/universe/g/git-lfs/git-lfs_3.0.2-1_amd64.deb
+
+        apt-get -q install --allow-downgrades -y -qq /tmp/git-man.deb
+        apt-get -q install --allow-downgrades -y -qq /tmp/git.deb
+        apt-get -q install --allow-downgrades -y -qq /tmp/git-lfs.deb
@@ -28,7 +28,7 @@ jobs:
 
     runs-on: docker
     container:
-      image: data.forgejo.org/renovate/renovate:41.1.4
+      image: data.forgejo.org/renovate/renovate:41.17.2
 
     steps:
       - name: Load renovate repo cache
@@ -33,11 +33,8 @@ jobs:
     steps:
      - uses: https://data.forgejo.org/actions/checkout@v4
      - uses: ./.forgejo/workflows-composite/setup-env
-      - name: install git 2.30
-        uses: ./.forgejo/workflows-composite/apt-install-from
-        with:
-          packages: git/bullseye git-lfs/bullseye
-          release: bullseye
+      - name: install git 2.34.1 and git-lfs 3.0.2
+        uses: ./.forgejo/workflows-composite/install-minimum-git-version
      - uses: ./.forgejo/workflows-composite/build-backend
      - run: |
          su forgejo -c 'make test-backend test-check'

@@ -55,11 +52,8 @@ jobs:
     steps:
      - uses: https://data.forgejo.org/actions/checkout@v4
      - uses: ./.forgejo/workflows-composite/setup-env
-      - name: install git 2.30
-        uses: ./.forgejo/workflows-composite/apt-install-from
-        with:
-          packages: git/bullseye git-lfs/bullseye
-          release: bullseye
+      - name: install git 2.34.1 and git-lfs 3.0.2
+        uses: ./.forgejo/workflows-composite/install-minimum-git-version
      - uses: ./.forgejo/workflows-composite/build-backend
      - run: |
          su forgejo -c 'make test-sqlite-migration test-sqlite'
Makefile | 2

@@ -47,7 +47,7 @@ GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1.6.0 # renovate: datasour
 GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1 # renovate: datasource=go
 DEADCODE_PACKAGE ?= golang.org/x/tools/cmd/deadcode@v0.34.0 # renovate: datasource=go
 GOMOCK_PACKAGE ?= go.uber.org/mock/mockgen@v0.5.2 # renovate: datasource=go
-RENOVATE_NPM_PACKAGE ?= renovate@41.1.4 # renovate: datasource=docker packageName=data.forgejo.org/renovate/renovate
+RENOVATE_NPM_PACKAGE ?= renovate@41.17.2 # renovate: datasource=docker packageName=data.forgejo.org/renovate/renovate
 
 # https://github.com/disposable-email-domains/disposable-email-domains/commits/main/
 DISPOSABLE_EMAILS_SHA ?= 0c27e671231d27cf66370034d7f6818037416989 # renovate: ...
@@ -82,6 +82,11 @@ wiki, issues, labels, releases, release_assets, milestones, pull_requests, comme
 }
 
 func runDumpRepository(stdCtx context.Context, ctx *cli.Command) error {
+    setupConsoleLogger(log.INFO, log.CanColorStderr, os.Stderr)
+
+    // setting.DisableLoggerInit()
+    setting.LoadSettings() // cannot access skip_tls_verify settings otherwise
+
     stdCtx, cancel := installSignals(stdCtx)
     defer cancel()
 
cmd/hook.go | 14

@@ -231,8 +231,6 @@ Forgejo or set your environment appropriately.`, "")
         }
     }
 
-    supportProcReceive := git.CheckGitVersionAtLeast("2.29") == nil
-
     for scanner.Scan() {
         // TODO: support news feeds for wiki
         if isWiki {
@@ -250,10 +248,7 @@ Forgejo or set your environment appropriately.`, "")
         total++
         lastline++
 
-        // If the ref is a branch or tag, check if it's protected
-        // if supportProcReceive all ref should be checked because
-        // permission check was delayed
-        if supportProcReceive || refFullName.IsBranch() || refFullName.IsTag() {
+        // All references should be checked because permission check was delayed.
         oldCommitIDs[count] = oldCommitID
         newCommitIDs[count] = newCommitID
         refFullNames[count] = refFullName
@@ -273,9 +268,6 @@ Forgejo or set your environment appropriately.`, "")
             count = 0
             lastline = 0
         }
-        } else {
-            fmt.Fprint(out, ".")
-        }
         if lastline >= hookBatchSize {
             fmt.Fprint(out, "\n")
             lastline = 0
@@ -513,10 +505,6 @@ Forgejo or set your environment appropriately.`, "")
         return nil
     }
 
-    if git.CheckGitVersionAtLeast("2.29") != nil {
-        return fail(ctx, "No proc-receive support", "current git version doesn't support proc-receive.")
-    }
-
     reader := bufio.NewReader(os.Stdin)
     repoUser := os.Getenv(repo_module.EnvRepoUsername)
     repoName := os.Getenv(repo_module.EnvRepoName)
@@ -193,13 +193,11 @@ func runServ(ctx context.Context, c *cli.Command) error {
     }
 
     if len(words) < 2 {
-        if git.CheckGitVersionAtLeast("2.29") == nil {
         // for AGit Flow
         if cmd == "ssh_info" {
             fmt.Print(`{"type":"agit","version":1}`)
             return nil
         }
-        }
         return fail(ctx, "Too few arguments", "Too few arguments in cmd: %s", cmd)
     }
 
go.mod | 2

@@ -79,7 +79,7 @@ require (
 	github.com/minio/minio-go/v7 v7.0.94
 	github.com/msteinert/pam/v2 v2.1.0
 	github.com/nektos/act v0.2.52
-	github.com/niklasfasching/go-org v1.8.0
+	github.com/niklasfasching/go-org v1.9.0
 	github.com/olivere/elastic/v7 v7.0.32
 	github.com/opencontainers/go-digest v1.0.0
 	github.com/opencontainers/image-spec v1.1.1
go.sum | 4

@@ -426,8 +426,8 @@ github.com/msteinert/pam/v2 v2.1.0 h1:er5F9TKV5nGFuTt12ubtqPHEUdeBwReP7vd3wovidG
 github.com/msteinert/pam/v2 v2.1.0/go.mod h1:KT28NNIcDFf3PcBmNI2mIGO4zZJ+9RSs/At2PB3IDVc=
 github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822 h1:C3w9PqII01/Oq1c1nUAm88MOHcQC9l5mIlSMApZMrHA=
 github.com/munnerz/goautoneg v0.0.0-20191010083416-a7dc8b61c822/go.mod h1:+n7T8mK8HuQTcFwEeznm/DIxMOiR9yIdICNftLE1DvQ=
-github.com/niklasfasching/go-org v1.8.0 h1:WyGLaajLLp8JbQzkmapZ1y0MOzKuKV47HkZRloi+HGY=
-github.com/niklasfasching/go-org v1.8.0/go.mod h1:e2A9zJs7cdONrEGs3gvxCcaAEpwwPNPG7csDpXckMNg=
+github.com/niklasfasching/go-org v1.9.0 h1:4/Sr68Qx06hjC9MVDB/4etGP67JionLHGscLMOClpnk=
+github.com/niklasfasching/go-org v1.9.0/go.mod h1:ZAGFFkWvUQcpazmi/8nHqwvARpr1xpb+Es67oUGX/48=
 github.com/nwaples/rardecode v1.1.0/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
 github.com/nwaples/rardecode v1.1.3 h1:cWCaZwfM5H7nAD6PyEdcVnczzV8i/JtotnyW/dD9lEc=
 github.com/nwaples/rardecode v1.1.3/go.mod h1:5DzqNKiOdpKKBH87u8VlvAnPZMXcGRhxWkRpHbbfGS0=
@@ -186,10 +186,46 @@
   type: 8 # milestone
   poster_id: 1
   issue_id: 1 # in repo_id 1
-  milestone_id: 10 # not exsting milestone
+  milestone_id: 10 # not existing milestone
   old_milestone_id: 0
   created_unix: 946685080
 
+-
+  id: 2004
+  type: 8 # milestone
+  poster_id: 1
+  issue_id: 1 # in repo_id 1
+  milestone_id: 1
+  old_milestone_id: 10 # not existing (ghost) milestone
+  created_unix: 946685085
+
+-
+  id: 2005
+  type: 8 # milestone
+  poster_id: 1
+  issue_id: 1 # in repo_id 1
+  milestone_id: 10 # not existing (ghost) milestone
+  old_milestone_id: 1
+  created_unix: 946685090
+
+-
+  id: 2006
+  type: 8 # milestone
+  poster_id: 1
+  issue_id: 1 # in repo_id 1
+  milestone_id: 11 # not existing (ghost) milestone
+  old_milestone_id: 10 # not existing (ghost) milestone
+  created_unix: 946685095
+
+-
+  id: 2007
+  type: 8 # milestone
+  poster_id: 1
+  issue_id: 1 # in repo_id 1
+  milestone_id: 0
+  old_milestone_id: 11 # not existing (ghost) milestone
+  created_unix: 946685100
+
 -
   id: 2010
   type: 30 # project
@@ -101,7 +101,7 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
         return nil
     }
 
-    milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+    milestones := make(map[int64]*Milestone, len(milestoneIDs))
     left := len(milestoneIDs)
     for left > 0 {
         limit := db.DefaultMaxInSize
@@ -110,7 +110,7 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
         }
         err := db.GetEngine(ctx).
             In("id", milestoneIDs[:limit]).
-            Find(&milestoneMaps)
+            Find(&milestones)
         if err != nil {
             return err
         }
@@ -118,8 +118,8 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
         milestoneIDs = milestoneIDs[limit:]
     }
 
-    for _, issue := range comments {
-        issue.Milestone = milestoneMaps[issue.MilestoneID]
+    for _, comment := range comments {
+        comment.Milestone = milestones[comment.MilestoneID]
     }
     return nil
 }
@@ -140,7 +140,7 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
         return nil
     }
 
-    milestoneMaps := make(map[int64]*Milestone, len(milestoneIDs))
+    milestones := make(map[int64]*Milestone, len(milestoneIDs))
     left := len(milestoneIDs)
     for left > 0 {
         limit := db.DefaultMaxInSize
@@ -149,7 +149,7 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
         }
         err := db.GetEngine(ctx).
             In("id", milestoneIDs[:limit]).
-            Find(&milestoneMaps)
+            Find(&milestones)
         if err != nil {
             return err
         }
@@ -157,8 +157,8 @@ func (comments CommentList) loadOldMilestones(ctx context.Context) error {
         milestoneIDs = milestoneIDs[limit:]
     }
 
-    for _, issue := range comments {
-        issue.OldMilestone = milestoneMaps[issue.MilestoneID]
+    for _, comment := range comments {
+        comment.OldMilestone = milestones[comment.OldMilestoneID]
     }
     return nil
 }
@@ -48,6 +48,8 @@ type IssuesOptions struct { //nolint
     UpdatedBeforeUnix int64
     // prioritize issues from this repo
     PriorityRepoID int64
+    // if this issue index (not ID) exists and matches the filters, *and* priorityrepo sort is used, show it first
+    PriorityIssueIndex int64
     IsArchived optional.Option[bool]
 
     // If combined with AllPublic, then private as well as public issues
@@ -60,7 +62,7 @@ type IssuesOptions struct { //nolint
 
 // applySorts sort an issues-related session based on the provided
 // sortType string
-func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
+func applySorts(sess *xorm.Session, sortType string, priorityRepoID, priorityIssueIndex int64) {
     switch sortType {
     case "oldest":
         sess.Asc("issue.created_unix").Asc("issue.id")
@@ -97,8 +99,11 @@ func applySorts(sess *xorm.Session, sortType string, priorityRepoID int64) {
     case "priorityrepo":
         sess.OrderBy("CASE "+
             "WHEN issue.repo_id = ? THEN 1 "+
-            "ELSE 2 END ASC", priorityRepoID).
-            Desc("issue.created_unix").
+            "ELSE 2 END ASC", priorityRepoID)
+        if priorityIssueIndex != 0 {
+            sess.OrderBy("issue.index = ? DESC", priorityIssueIndex)
+        }
+        sess.Desc("issue.created_unix").
             Desc("issue.id")
     case "project-column-sorting":
         sess.Asc("project_issue.sorting").Desc("issue.created_unix").Desc("issue.id")
@@ -470,7 +475,7 @@ func Issues(ctx context.Context, opts *IssuesOptions) (IssueList, error) {
         Join("INNER", "repository", "`issue`.repo_id = `repository`.id")
     applyLimit(sess, opts)
     applyConditions(sess, opts)
-    applySorts(sess, opts.SortType, opts.PriorityRepoID)
+    applySorts(sess, opts.SortType, opts.PriorityRepoID, opts.PriorityIssueIndex)
 
     issues := IssueList{}
     if err := sess.Find(&issues); err != nil {
@@ -494,7 +499,7 @@ func IssueIDs(ctx context.Context, opts *IssuesOptions, otherConds ...builder.Co
     }
 
     applyLimit(sess, opts)
-    applySorts(sess, opts.SortType, opts.PriorityRepoID)
+    applySorts(sess, opts.SortType, opts.PriorityRepoID, opts.PriorityIssueIndex)
 
     var res []int64
     total, err := sess.Select("`issue`.id").Table(&Issue{}).FindAndCount(&res)
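Editorial note, not part of the diff: with the change above, the "priorityrepo" sort now stacks three ORDER BY terms — the priority repository first, then an optional exact issue.index match, then recency. A minimal, self-contained sketch of the resulting ordering; the repository ID 3 and issue index 17 are made-up values:

package main

import "fmt"

// Sketch of the ordering that applySorts builds for sortType "priorityrepo",
// assuming priorityRepoID = 3 and priorityIssueIndex = 17 (both hypothetical).
func main() {
    orderBy := "CASE WHEN issue.repo_id = 3 THEN 1 ELSE 2 END ASC" + // priority repo first
        ", issue.index = 17 DESC" + // only added when priorityIssueIndex != 0
        ", issue.created_unix DESC, issue.id DESC" // newest first as the final tie breaker
    fmt.Println("ORDER BY " + orderBy)
}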
@@ -149,7 +149,7 @@ func PullRequests(ctx context.Context, baseRepoID int64, opts *PullRequestsOptio
     }
 
     findSession := listPullRequestStatement(ctx, baseRepoID, opts)
-    applySorts(findSession, opts.SortType, 0)
+    applySorts(findSession, opts.SortType, 0, 0)
     findSession = db.SetSessionPagination(findSession, opts)
     prs := make([]*PullRequest, 0, opts.PageSize)
     found := findSession.Find(&prs)
@@ -100,7 +100,7 @@ type AbuseReport struct {
     // The abuse category selected by the reporter.
     Category AbuseCategoryType `xorm:"INDEX NOT NULL"`
     // Remarks provided by the reporter.
-    Remarks string
+    Remarks string `xorm:"VARCHAR(500)"`
     // The ID of the corresponding shadow-copied content when exists; otherwise null.
     ShadowCopyID sql.NullInt64 `xorm:"DEFAULT NULL"`
     CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
@@ -17,7 +17,7 @@ import (
 
 type AbuseReportShadowCopy struct {
     ID          int64              `xorm:"pk autoincr"`
-    RawValue    string             `xorm:"NOT NULL"`
+    RawValue    string             `xorm:"LONGTEXT NOT NULL"` // A JSON with relevant fields from user, repository, issue or comment table.
     CreatedUnix timeutil.TimeStamp `xorm:"created NOT NULL"`
 }
 
@@ -132,7 +132,7 @@ func (r *BlameReader) Close() error {
 // CreateBlameReader creates reader for given repository, commit and file
 func CreateBlameReader(ctx context.Context, objectFormat ObjectFormat, repoPath string, commit *Commit, file string, bypassBlameIgnore bool) (*BlameReader, error) {
     var ignoreRevsFile *string
-    if CheckGitVersionAtLeast("2.23") == nil && !bypassBlameIgnore {
+    if !bypassBlameIgnore {
         ignoreRevsFile = tryCreateBlameIgnoreRevsFile(commit)
     }
 
@@ -8,6 +8,7 @@ import (
     "bufio"
     "bytes"
     "encoding/base64"
+    "fmt"
     "io"
 
     "forgejo.org/modules/log"
@@ -172,33 +173,43 @@ func (b *Blob) GetBlobContent(limit int64) (string, error) {
     return string(buf), err
 }
 
-// GetBlobContentBase64 Reads the content of the blob with a base64 encode and returns the encoded string
-func (b *Blob) GetBlobContentBase64() (string, error) {
-    dataRc, err := b.DataAsync()
+type BlobTooLargeError struct {
+    Size, Limit int64
+}
+
+func (b BlobTooLargeError) Error() string {
+    return fmt.Sprintf("blob: content larger than limit (%d > %d)", b.Size, b.Limit)
+}
+
+// GetContentBase64 Reads the content of the blob and returns it as base64 encoded string.
+// Returns [BlobTooLargeError] if the (unencoded) content is larger than the limit.
+func (b *Blob) GetContentBase64(limit int64) (string, error) {
+    if b.Size() > limit {
+        return "", BlobTooLargeError{
+            Size:  b.Size(),
+            Limit: limit,
+        }
+    }
+
+    rc, size, err := b.NewTruncatedReader(limit)
     if err != nil {
         return "", err
     }
-    defer dataRc.Close()
+    defer rc.Close()
 
-    pr, pw := io.Pipe()
-    encoder := base64.NewEncoder(base64.StdEncoding, pw)
+    encoding := base64.StdEncoding
+    buf := bytes.NewBuffer(make([]byte, 0, encoding.EncodedLen(int(size))))
 
-    go func() {
-        _, err := io.Copy(encoder, dataRc)
-        _ = encoder.Close()
+    encoder := base64.NewEncoder(encoding, buf)
 
-        if err != nil {
-            _ = pw.CloseWithError(err)
-        } else {
-            _ = pw.Close()
-        }
-    }()
-
-    out, err := io.ReadAll(pr)
-    if err != nil {
+    if _, err := io.Copy(encoder, rc); err != nil {
         return "", err
     }
-    return string(out), nil
+    if err := encoder.Close(); err != nil {
+        return "", err
+    }
+
+    return buf.String(), nil
 }
 
 // GuessContentType guesses the content type of the blob.
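Editorial note, not part of the diff: a minimal sketch of how a caller outside modules/git might use the new Blob.GetContentBase64 and its typed BlobTooLargeError. The function name, the way the *git.Blob is obtained, and the 1 MiB limit are assumptions made for illustration.

package example

import (
    "errors"
    "fmt"

    "forgejo.org/modules/git"
)

// renderBlob returns the base64 content of a blob, degrading gracefully when
// the blob exceeds an arbitrary limit. The name and limit are hypothetical.
func renderBlob(blob *git.Blob) (string, error) {
    content, err := blob.GetContentBase64(1 << 20) // 1 MiB, an assumed limit
    if err != nil {
        var tooLarge git.BlobTooLargeError
        if errors.As(err, &tooLarge) {
            // The typed error carries both sizes, so callers can report or skip.
            return "", fmt.Errorf("blob is %d bytes, above the %d byte limit", tooLarge.Size, tooLarge.Limit)
        }
        return "", err
    }
    return content, nil
}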
@@ -63,6 +63,24 @@ func TestBlob(t *testing.T) {
         require.Equal(t, "file2\n", r)
     })
 
+    t.Run("GetContentBase64", func(t *testing.T) {
+        r, err := testBlob.GetContentBase64(100)
+        require.NoError(t, err)
+        require.Equal(t, "ZmlsZTIK", r)
+
+        r, err = testBlob.GetContentBase64(-1)
+        require.ErrorAs(t, err, &BlobTooLargeError{})
+        require.Empty(t, r)
+
+        r, err = testBlob.GetContentBase64(4)
+        require.ErrorAs(t, err, &BlobTooLargeError{})
+        require.Empty(t, r)
+
+        r, err = testBlob.GetContentBase64(6)
+        require.NoError(t, err)
+        require.Equal(t, "ZmlsZTIK", r)
+    })
+
     t.Run("NewTruncatedReader", func(t *testing.T) {
         // read fewer than available
         rc, size, err := testBlob.NewTruncatedReader(100)
@@ -412,11 +412,7 @@ func (c *Commit) GetSubModule(entryname string) (string, error) {
 
 // GetBranchName gets the closest branch name (as returned by 'git name-rev --name-only')
 func (c *Commit) GetBranchName() (string, error) {
-    cmd := NewCommand(c.repo.Ctx, "name-rev")
-    if CheckGitVersionAtLeast("2.13.0") == nil {
-        cmd.AddArguments("--exclude", "refs/tags/*")
-    }
-    cmd.AddArguments("--name-only", "--no-undefined").AddDynamicArguments(c.ID.String())
+    cmd := NewCommand(c.repo.Ctx, "name-rev", "--exclude", "refs/tags/*", "--name-only", "--no-undefined").AddDynamicArguments(c.ID.String())
     data, _, err := cmd.RunStdString(&RunOpts{Dir: c.repo.Path})
     if err != nil {
         // handle special case where git can not describe commit
@@ -23,7 +23,7 @@ import (
 )
 
 // RequiredVersion is the minimum Git version required
-const RequiredVersion = "2.0.0"
+const RequiredVersion = "2.34.1"
 
 var (
     // GitExecutable is the command name of git
@@ -33,7 +33,6 @@ var (
     // DefaultContext is the default context to run git commands in, must be initialized by git.InitXxx
     DefaultContext context.Context
 
-    SupportProcReceive bool // >= 2.29
     SupportHashSha256 bool // >= 2.42, SHA-256 repositories no longer an ‘experimental curiosity’
     InvertedGitFlushEnv bool // 2.43.1
     SupportCheckAttrOnBare bool // >= 2.40
@@ -113,7 +112,7 @@ func VersionInfo() string {
     format := "%s"
     args := []any{GitVersion.Original()}
     // Since git wire protocol has been released from git v2.18
-    if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
+    if setting.Git.EnableAutoGitWireProtocol {
         format += ", Wire Protocol %s Enabled"
         args = append(args, "Version 2") // for focus color
     }
@@ -172,16 +171,13 @@ func InitFull(ctx context.Context) (err error) {
         _ = os.Setenv("GNUPGHOME", filepath.Join(HomeDir(), ".gnupg"))
     }
 
-    // Since git wire protocol has been released from git v2.18
-    if setting.Git.EnableAutoGitWireProtocol && CheckGitVersionAtLeast("2.18") == nil {
+    if setting.Git.EnableAutoGitWireProtocol {
         globalCommandArgs = append(globalCommandArgs, "-c", "protocol.version=2")
     }
 
     // Explicitly disable credential helper, otherwise Git credentials might leak
-    if CheckGitVersionAtLeast("2.9") == nil {
     globalCommandArgs = append(globalCommandArgs, "-c", "credential.helper=")
-    }
-    SupportProcReceive = CheckGitVersionAtLeast("2.29") == nil
     SupportHashSha256 = CheckGitVersionAtLeast("2.42") == nil
     SupportCheckAttrOnBare = CheckGitVersionAtLeast("2.40") == nil
     if SupportHashSha256 {
@@ -195,9 +191,6 @@ func InitFull(ctx context.Context) (err error) {
     SupportGrepMaxCount = CheckGitVersionAtLeast("2.38") == nil
 
     if setting.LFS.StartServer {
-        if CheckGitVersionAtLeast("2.1.2") != nil {
-            return errors.New("LFS server support requires Git >= 2.1.2")
-        }
         globalCommandArgs = append(globalCommandArgs, "-c", "filter.lfs.required=", "-c", "filter.lfs.smudge=", "-c", "filter.lfs.clean=")
     }
 
@@ -234,18 +227,15 @@ func syncGitConfig() (err error) {
         }
     }
 
-    // Set git some configurations - these must be set to these values for gitea to work correctly
+    // Set git some configurations - these must be set to these values for forgejo to work correctly
     if err := configSet("core.quotePath", "false"); err != nil {
         return err
     }
 
-    if CheckGitVersionAtLeast("2.10") == nil {
     if err := configSet("receive.advertisePushOptions", "true"); err != nil {
         return err
     }
-    }
 
-    if CheckGitVersionAtLeast("2.18") == nil {
     if err := configSet("core.commitGraph", "true"); err != nil {
         return err
     }
@@ -255,18 +245,11 @@ func syncGitConfig() (err error) {
     if err := configSet("fetch.writeCommitGraph", "true"); err != nil {
         return err
     }
-    }
 
-    if SupportProcReceive {
     // set support for AGit flow
     if err := configAddNonExist("receive.procReceiveRefs", "refs/for"); err != nil {
         return err
     }
-    } else {
-        if err := configUnsetAll("receive.procReceiveRefs", "refs/for"); err != nil {
-            return err
-        }
-    }
 
     // Due to CVE-2022-24765, git now denies access to git directories which are not owned by current user
     // however, some docker users and samba users find it difficult to configure their systems so that Gitea's git repositories are owned by the Gitea user. (Possibly Windows Service users - but ownership in this case should really be set correctly on the filesystem.)
@@ -284,11 +267,6 @@ func syncGitConfig() (err error) {
 
     switch setting.Repository.Signing.Format {
     case "ssh":
-        // First do a git version check.
-        if CheckGitVersionAtLeast("2.34.0") != nil {
-            return errors.New("ssh signing requires Git >= 2.34.0")
-        }
-
         // Get the ssh-keygen binary that Git will use.
         // This can be overridden in app.ini in [git.config] section, so we must
         // query this information.
@@ -325,8 +303,7 @@ func syncGitConfig() (err error) {
         }
     }
 
-    // By default partial clones are disabled, enable them from git v2.22
-    if !setting.Git.DisablePartialClone && CheckGitVersionAtLeast("2.22") == nil {
+    if !setting.Git.DisablePartialClone {
         if err = configSet("uploadpack.allowfilter", "true"); err != nil {
             return err
         }
@@ -14,7 +14,6 @@ import (
     "forgejo.org/modules/test"
     "forgejo.org/modules/util"
 
-    "github.com/hashicorp/go-version"
     "github.com/stretchr/testify/assert"
     "github.com/stretchr/testify/require"
 )
@@ -105,10 +104,6 @@ func TestSyncConfigGPGFormat(t *testing.T) {
     })
 
     t.Run("SSH format", func(t *testing.T) {
-        if CheckGitVersionAtLeast("2.34.0") != nil {
-            t.SkipNow()
-        }
-
         r, err := os.OpenRoot(t.TempDir())
         require.NoError(t, err)
         f, err := r.OpenFile("ssh-keygen", os.O_CREATE|os.O_TRUNC, 0o700)
@@ -121,13 +116,6 @@ func TestSyncConfigGPGFormat(t *testing.T) {
         assert.True(t, gitConfigContains("[gpg]"))
         assert.True(t, gitConfigContains("format = ssh"))
 
-        t.Run("Old version", func(t *testing.T) {
-            oldVersion, err := version.NewVersion("2.33.0")
-            require.NoError(t, err)
-            defer test.MockVariableValue(&GitVersion, oldVersion)()
-            require.ErrorContains(t, syncGitConfig(), "ssh signing requires Git >= 2.34.0")
-        })
-
         t.Run("No ssh-keygen binary", func(t *testing.T) {
             require.NoError(t, r.Remove("ssh-keygen"))
             require.ErrorContains(t, syncGitConfig(), "git signing requires a ssh-keygen binary")
@@ -16,26 +16,6 @@ import (
     "forgejo.org/modules/log"
 )
 
-// RevListAllObjects runs rev-list --objects --all and writes to a pipewriter
-func RevListAllObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, basePath string, errChan chan<- error) {
-    defer wg.Done()
-    defer revListWriter.Close()
-
-    stderr := new(bytes.Buffer)
-    var errbuf strings.Builder
-    cmd := git.NewCommand(ctx, "rev-list", "--objects", "--all")
-    if err := cmd.Run(&git.RunOpts{
-        Dir:    basePath,
-        Stdout: revListWriter,
-        Stderr: stderr,
-    }); err != nil {
-        log.Error("git rev-list --objects --all [%s]: %v - %s", basePath, err, errbuf.String())
-        err = fmt.Errorf("git rev-list --objects --all [%s]: %w - %s", basePath, err, errbuf.String())
-        _ = revListWriter.CloseWithError(err)
-        errChan <- err
-    }
-}
-
 // RevListObjects run rev-list --objects from headSHA to baseSHA
 func RevListObjects(ctx context.Context, revListWriter *io.PipeWriter, wg *sync.WaitGroup, tmpBasePath, headSHA, baseSHA string, errChan chan<- error) {
     defer wg.Done()
@@ -12,14 +12,7 @@ import (
 
 // GetRemoteAddress returns remote url of git repository in the repoPath with special remote name
 func GetRemoteAddress(ctx context.Context, repoPath, remoteName string) (string, error) {
-    var cmd *Command
-    if CheckGitVersionAtLeast("2.7") == nil {
-        cmd = NewCommand(ctx, "remote", "get-url").AddDynamicArguments(remoteName)
-    } else {
-        cmd = NewCommand(ctx, "config", "--get").AddDynamicArguments("remote." + remoteName + ".url")
-    }
-
-    result, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath})
+    result, _, err := NewCommand(ctx, "remote", "get-url").AddDynamicArguments(remoteName).RunStdString(&RunOpts{Dir: repoPath})
     if err != nil {
         return "", err
     }
@@ -5,7 +5,6 @@ package git
 
 import (
     "context"
-    "fmt"
     "io"
     "io/fs"
     "os"
@@ -197,7 +196,7 @@ func TestGitAttributeCheckerError(t *testing.T) {
     path := t.TempDir()
 
     // we can't use unittest.CopyDir because of an import cycle (git.Init in unittest)
-    require.NoError(t, CopyFS(path, os.DirFS(filepath.Join(testReposDir, "language_stats_repo"))))
+    require.NoError(t, os.CopyFS(path, os.DirFS(filepath.Join(testReposDir, "language_stats_repo"))))
 
     gitRepo, err := openRepositoryWithDefaultContext(path)
     require.NoError(t, err)
@@ -324,32 +323,3 @@ func TestGitAttributeCheckerError(t *testing.T) {
         require.ErrorIs(t, err, fs.ErrClosed)
     })
 }
-
-// CopyFS is adapted from https://github.com/golang/go/issues/62484
-// which should be available with go1.23
-func CopyFS(dir string, fsys fs.FS) error {
-    return fs.WalkDir(fsys, ".", func(path string, d fs.DirEntry, _ error) error {
-        targ := filepath.Join(dir, filepath.FromSlash(path))
-        if d.IsDir() {
-            return os.MkdirAll(targ, 0o777)
-        }
-        r, err := fsys.Open(path)
-        if err != nil {
-            return err
-        }
-        defer r.Close()
-        info, err := r.Stat()
-        if err != nil {
-            return err
-        }
-        w, err := os.OpenFile(targ, os.O_CREATE|os.O_TRUNC|os.O_WRONLY, 0o666|info.Mode()&0o777)
-        if err != nil {
-            return err
-        }
-        if _, err := io.Copy(w, r); err != nil {
-            w.Close()
-            return fmt.Errorf("copying %s: %v", path, err)
-        }
-        return w.Close()
-    })
-}
@@ -443,7 +443,6 @@ func (repo *Repository) getCommitsBeforeLimit(id ObjectID, num int) ([]*Commit,
 }
 
 func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error) {
-    if CheckGitVersionAtLeast("2.7.0") == nil {
     command := NewCommand(repo.Ctx, "for-each-ref", "--format=%(refname:strip=2)").AddOptionValues("--contains", commit.ID.String(), BranchPrefix)
 
     if limit != -1 {
@@ -459,29 +458,6 @@ func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error)
     return branches, nil
 }
 
-    stdout, _, err := NewCommand(repo.Ctx, "branch").AddOptionValues("--contains", commit.ID.String()).RunStdString(&RunOpts{Dir: repo.Path})
-    if err != nil {
-        return nil, err
-    }
-
-    refs := strings.Split(stdout, "\n")
-
-    var max int
-    if len(refs) > limit {
-        max = limit
-    } else {
-        max = len(refs) - 1
-    }
-
-    branches := make([]string, max)
-    for i, ref := range refs[:max] {
-        parts := strings.Fields(ref)
-
-        branches[i] = parts[len(parts)-1]
-    }
-    return branches, nil
-}
-
 // GetCommitsFromIDs get commits from commit IDs
 func (repo *Repository) GetCommitsFromIDs(commitIDs []string) []*Commit {
     commits := make([]*Commit, 0, len(commitIDs))
@@ -11,10 +11,8 @@ import (
 // WriteCommitGraph write commit graph to speed up repo access
 // this requires git v2.18 to be installed
 func WriteCommitGraph(ctx context.Context, repoPath string) error {
-    if CheckGitVersionAtLeast("2.18") == nil {
     if _, _, err := NewCommand(ctx, "commit-graph", "write").RunStdString(&RunOpts{Dir: repoPath}); err != nil {
         return fmt.Errorf("unable to write commit-graph for '%s' : %w", repoPath, err)
     }
-    }
     return nil
 }
@@ -116,32 +116,37 @@ func (te *TreeEntry) Type() string {
     }
 }
 
+// LinkTarget returns the target of the symlink as string.
+func (te *TreeEntry) LinkTarget() (string, error) {
+    if !te.IsLink() {
+        return "", ErrBadLink{te.Name(), "not a symlink"}
+    }
+
+    const symlinkLimit = 4096 // according to git config core.longpaths https://stackoverflow.com/a/22575737
+    blob := te.Blob()
+    if blob.Size() > symlinkLimit {
+        return "", ErrBadLink{te.Name(), "symlink too large"}
+    }
+
+    rc, size, err := blob.NewTruncatedReader(symlinkLimit)
+    if err != nil {
+        return "", err
+    }
+    defer rc.Close()
+
+    buf := make([]byte, int(size))
+    _, err = io.ReadFull(rc, buf)
+    return string(buf), err
+}
+
 // FollowLink returns the entry pointed to by a symlink
 func (te *TreeEntry) FollowLink() (*TreeEntry, string, error) {
-    if !te.IsLink() {
-        return nil, "", ErrBadLink{te.Name(), "not a symlink"}
-    }
-
     // read the link
-    r, err := te.Blob().DataAsync()
+    lnk, err := te.LinkTarget()
     if err != nil {
         return nil, "", err
     }
-    closed := false
-    defer func() {
-        if !closed {
-            _ = r.Close()
-        }
-    }()
-    buf := make([]byte, te.Size())
-    _, err = io.ReadFull(r, buf)
-    if err != nil {
-        return nil, "", err
-    }
-    _ = r.Close()
-    closed = true
-
-    lnk := string(buf)
     t := te.ptree
 
     // traverse up directories
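Editorial note, not part of the diff: a small sketch of using the new TreeEntry.LinkTarget helper from outside the package; the caller and the way the entry is obtained are assumptions for illustration.

package example

import "forgejo.org/modules/git"

// describeEntry renders "name -> target" for symlink entries and just the name
// otherwise. The helper itself is illustrative, not from the diff.
func describeEntry(entry *git.TreeEntry) (string, error) {
    if !entry.IsLink() {
        return entry.Name(), nil
    }
    target, err := entry.LinkTarget() // returns ErrBadLink for non-symlinks or oversized targets
    if err != nil {
        return "", err
    }
    return entry.Name() + " -> " + target, nil
}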
@@ -170,7 +170,7 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
 
         if issueID, err := token.ParseIssueReference(); err == nil {
             idQuery := inner_bleve.NumericEqualityQuery(issueID, "index")
-            idQuery.SetBoost(5.0)
+            idQuery.SetBoost(20.0)
             innerQ.AddQuery(idQuery)
         }
 
@@ -197,6 +197,15 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
         queries = append(queries, bleve.NewDisjunctionQuery(repoQueries...))
     }
 
+    if options.PriorityRepoID.Has() {
+        eq := inner_bleve.NumericEqualityQuery(options.PriorityRepoID.Value(), "repo_id")
+        eq.SetBoost(10.0)
+        meh := bleve.NewMatchAllQuery()
+        meh.SetBoost(0)
+        should := bleve.NewDisjunctionQuery(eq, meh)
+        queries = append(queries, should)
+    }
+
     if options.IsPull.Has() {
         queries = append(queries, inner_bleve.BoolFieldQuery(options.IsPull.Value(), "is_pull"))
     }
@@ -53,6 +53,7 @@ func (i *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
 
     cond := builder.NewCond()
 
+    var priorityIssueIndex int64
     if options.Keyword != "" {
         repoCond := builder.In("repo_id", options.RepoIDs)
         if len(options.RepoIDs) == 1 {
@@ -82,6 +83,7 @@ func (i *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
                 builder.Eq{"`index`": issueID},
                 cond,
             )
+            priorityIssueIndex = issueID
         }
     }
 
@@ -89,6 +91,7 @@ func (i *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
     if err != nil {
         return nil, err
     }
+    opt.PriorityIssueIndex = priorityIssueIndex
 
     // If pagesize == 0, return total count only. It's a special case for search count.
     if options.Paginator != nil && options.Paginator.PageSize == 0 {
@@ -78,6 +78,11 @@ func ToDBOptions(ctx context.Context, options *internal.SearchOptions) (*issue_m
         User: nil,
     }
 
+    if options.PriorityRepoID.Has() {
+        opts.SortType = "priorityrepo"
+        opts.PriorityRepoID = options.PriorityRepoID.Value()
+    }
+
     if len(options.MilestoneIDs) == 1 && options.MilestoneIDs[0] == 0 {
         opts.MilestoneIDs = []int64{db.NoConditionID}
     } else {
@@ -165,7 +165,7 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
         }
         var eitherQ elastic.Query = innerQ
         if issueID, err := token.ParseIssueReference(); err == nil {
-            indexQ := elastic.NewTermQuery("index", issueID).Boost(15.0)
+            indexQ := elastic.NewTermQuery("index", issueID).Boost(20)
             eitherQ = elastic.NewDisMaxQuery().Query(indexQ).Query(innerQ).TieBreaker(0.5)
         }
         switch token.Kind {
@@ -188,6 +188,10 @@ func (b *Indexer) Search(ctx context.Context, options *internal.SearchOptions) (
         }
         query.Must(q)
     }
+    if options.PriorityRepoID.Has() {
+        q := elastic.NewTermQuery("repo_id", options.PriorityRepoID.Value()).Boost(10)
+        query.Should(q)
+    }
 
     if options.IsPull.Has() {
         query.Must(elastic.NewTermQuery("is_pull", options.IsPull.Value()))
@@ -77,6 +77,7 @@ type SearchOptions struct {
 
     RepoIDs   []int64 // repository IDs which the issues belong to
     AllPublic bool    // if include all public repositories
+    PriorityRepoID optional.Option[int64] // issues from this repository will be prioritized when SortByScore
 
     IsPull   optional.Option[bool] // if the issues is a pull request
     IsClosed optional.Option[bool] // if the issues is closed
@@ -742,6 +742,25 @@ var cases = []*testIndexerCase{
             }
         },
     },
+    {
+        Name: "PriorityRepoID",
+        SearchOptions: &internal.SearchOptions{
+            IsPull:         optional.Some(false),
+            IsClosed:       optional.Some(false),
+            PriorityRepoID: optional.Some(int64(3)),
+            Paginator:      &db.ListOptionsAll,
+            SortBy:         internal.SortByScore,
+        },
+        Expected: func(t *testing.T, data map[int64]*internal.IndexerData, result *internal.SearchResult) {
+            for i, v := range result.Hits {
+                if i < 7 {
+                    assert.Equal(t, int64(3), data[v.ID].RepoID)
+                } else {
+                    assert.NotEqual(t, int64(3), data[v.ID].RepoID)
+                }
+            }
+        },
+    },
 }
 
 type testIndexerCase struct {
@@ -39,16 +39,7 @@ func SearchPointerBlobs(ctx context.Context, repo *git.Repository, pointerChan c
     go pipeline.BlobsLessThan1024FromCatFileBatchCheck(catFileCheckReader, shasToBatchWriter, &wg)
 
     // 1. Run batch-check on all objects in the repository
-    if git.CheckGitVersionAtLeast("2.6.0") != nil {
-        revListReader, revListWriter := io.Pipe()
-        shasToCheckReader, shasToCheckWriter := io.Pipe()
-        wg.Add(2)
-        go pipeline.CatFileBatchCheck(ctx, shasToCheckReader, catFileCheckWriter, &wg, basePath)
-        go pipeline.BlobsFromRevListObjects(revListReader, shasToCheckWriter, &wg)
-        go pipeline.RevListAllObjects(ctx, revListWriter, &wg, basePath, errChan)
-    } else {
     go pipeline.CatFileBatchCheckAllObjects(ctx, catFileCheckWriter, &wg, basePath, errChan)
-    }
     wg.Wait()
 
     close(pointerChan)
@@ -102,5 +102,6 @@
   "admin.dashboard.cleanup_offline_runners": "Cleanup offline runners",
   "settings.visibility.description": "Profile visibility affects others' ability to access your non-private repositories. <a href=\"%s\" target=\"_blank\">Learn more</a>",
   "avatar.constraints_hint": "Custom avatar may not exceed %[1]s in size or be larger than %[2]dx%[3]d pixels",
+  "og.repo.summary_card.alt_description": "Summary card of repository %[1]s, described as: %[2]s",
   "meta.last_line": "Thank you for translating Forgejo! This line isn't seen by the users but it serves other purposes in the translation management. You can place a fun fact in the translation instead of translating it."
 }
package-lock.json | 911 (generated)

package.json | 24
@@ -27,7 +27,7 @@
     "esbuild-loader": "4.3.0",
     "escape-goat": "4.0.0",
     "fast-glob": "3.3.3",
-    "htmx.org": "1.9.12",
+    "htmx.org": "2.0.6",
     "idiomorph": "0.3.0",
     "jquery": "3.7.1",
     "katex": "0.16.22",
@@ -64,21 +64,21 @@
     "@eslint-community/eslint-plugin-eslint-comments": "4.5.0",
     "@playwright/test": "1.52.0",
     "@stoplight/spectral-cli": "6.15.0",
-    "@stylistic/eslint-plugin": "4.4.1",
-    "@stylistic/stylelint-plugin": "3.1.2",
-    "@vitejs/plugin-vue": "5.2.4",
+    "@stylistic/eslint-plugin": "5.0.0",
+    "@stylistic/stylelint-plugin": "3.1.3",
+    "@vitejs/plugin-vue": "6.0.0",
     "@vitest/coverage-v8": "3.2.3",
     "@vitest/eslint-plugin": "1.2.2",
     "@vue/test-utils": "2.4.6",
-    "eslint": "9.28.0",
-    "eslint-import-resolver-typescript": "4.4.3",
+    "eslint": "9.30.0",
+    "eslint-import-resolver-typescript": "4.4.4",
     "eslint-plugin-array-func": "5.0.2",
-    "eslint-plugin-import-x": "4.15.1",
+    "eslint-plugin-import-x": "4.16.1",
     "eslint-plugin-no-jquery": "3.1.1",
     "eslint-plugin-no-use-extend-native": "0.7.2",
     "eslint-plugin-playwright": "2.2.0",
     "eslint-plugin-regexp": "2.9.0",
-    "eslint-plugin-sonarjs": "3.0.2",
+    "eslint-plugin-sonarjs": "3.0.4",
     "eslint-plugin-toml": "0.12.0",
     "eslint-plugin-unicorn": "59.0.1",
     "eslint-plugin-vitest-globals": "1.5.0",
@@ -86,18 +86,18 @@
     "eslint-plugin-vue-scoped-css": "2.10.0",
     "eslint-plugin-wc": "3.0.1",
     "globals": "16.1.0",
-    "happy-dom": "18.0.0",
+    "happy-dom": "18.0.1",
     "license-checker-rseidelsohn": "4.4.2",
     "markdownlint-cli": "0.45.0",
     "postcss-html": "1.8.0",
     "sharp": "0.34.2",
-    "stylelint": "16.20.0",
+    "stylelint": "16.21.0",
     "stylelint-declaration-block-no-ignored-properties": "2.8.0",
     "stylelint-declaration-strict-value": "1.10.11",
     "stylelint-value-no-unknown-custom-properties": "6.0.1",
-    "svgo": "3.2.0",
+    "svgo": "4.0.0",
     "typescript": "5.8.3",
-    "typescript-eslint": "8.34.0",
+    "typescript-eslint": "8.35.0",
     "vite-string-plugin": "1.3.4",
     "vitest": "3.2.3"
   },

public/assets/img/svg/gitea-alt.svg (generated, 2 changed lines, 12 KiB before and after; image diff not shown)

public/assets/img/svg/gitea-chef.svg (generated, 2 changed lines, 1.7 KiB)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="svg gitea-chef" width="16" height="16" aria-hidden="true"><g fill="none" fill-rule="evenodd"><path fill="#435363" d="M18 25.8c-4.3 0-7.7-3.6-7.7-8s3.4-7.9 7.7-7.9c3.5 0 6.4 2.4 7.3 5.7h3c-1-5-5.2-8.7-10.3-8.7-5.9 0-10.6 4.9-10.6 10.9 0 6.1 4.7 11 10.6 11 5.1 0 9.3-3.7 10.3-8.7h-3c-.9 3.3-3.8 5.7-7.3 5.7"/><path fill="#435363" d="M12.8 23.2c1.3 1.4 3.1 2.3 5.2 2.3v-3.2c-1.2 0-2.3-.5-3.1-1.3z"/><path fill="#F38B00" d="M10.6 17.8c0 1.1.3 2.2.6 3.1l2.9-1.3c-.3-.5-.4-1.1-.4-1.8 0-2.4 1.9-4.4 4.3-4.4v-3.2c-4.1 0-7.4 3.4-7.4 7.6"/><path fill="#435363" d="m20.6 10.7-1.1 3c.9.4 1.7 1.1 2.2 1.9H25c-.7-2.2-2.3-4-4.4-4.9"/><path fill="#F38B00" d="m19.5 22 1.1 2.9c2.1-.8 3.7-2.6 4.4-4.8h-3.3c-.5.8-1.3 1.5-2.2 1.9"/><path fill="#435363" d="M4.4 22.1c-.1-.2-.1-.3-.1-.5-.1-.2-.1-.3-.2-.5V21c0-.1 0-.3-.1-.4v-.5c-.1-.1-.1-.2-.1-.3-.1-.6-.1-1.3-.1-2H.9c0 .8 0 1.5.1 2.2 0 .2.1.4.1.6v.1c0 .2.1.4.1.5s0 .2.1.3v.3c.1.1.1.2.1.4 0 0 .1.1.1.2 0 .2 0 .3.1.4v.2c.2.7.5 1.3.7 2L5 23.8c-.2-.6-.4-1.1-.6-1.7"/><path fill="#F38B00" d="M18 32.6c-3.9 0-7.5-1.7-10.1-4.4l-2 2.2c3.1 3.2 7.3 5.2 12.1 5.2 8.7 0 15.8-6.8 16.9-15.5H32c-1.1 7-7 12.5-14 12.5M18 3.1c3.1 0 6.1 1.1 8.4 2.9l1.8-2.4C25.3 1.4 21.8.1 18 .1 10.7.1 4.5 4.8 2.1 11.4l2.7 1.1C6.8 7 12 3.1 18 3.1"/><path fill="#435363" d="M32 15.6h2.9c-.3-2.6-1.2-5-2.5-7.2L30 10c1 1.7 1.7 3.6 2 5.6"/><path fill="#F38B00" d="M28.7 15.6h2.9c-.8-5.1-4.1-9.3-8.6-11.1l-1.1 2.8c3.5 1.3 6 4.5 6.8 8.3"/><path fill="#435363" d="M18 6.5v-3c-5.9 0-10.9 3.8-12.9 9.1l2.7 1.1C9.4 9.5 13.3 6.5 18 6.5"/><path fill="#F38B00" d="M7 17.8H4.1c0 6.1 3.6 11.2 8.7 13.4l1.1-2.8C9.9 26.7 7 22.6 7 17.8"/><path fill="#435363" d="M18 29.2v3c6.9 0 12.6-5.3 13.6-12.1h-2.9c-1 5.2-5.4 9.1-10.7 9.1"/></g></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 36 36" class="svg gitea-chef" width="16" height="16" aria-hidden="true"><g fill="none" fill-rule="evenodd"><path fill="#435363" d="M18 25.8c-4.3 0-7.7-3.6-7.7-8s3.4-7.9 7.7-7.9c3.5 0 6.4 2.4 7.3 5.7h3c-1-5-5.2-8.7-10.3-8.7-5.9 0-10.6 4.9-10.6 10.9 0 6.1 4.7 11 10.6 11 5.1 0 9.3-3.7 10.3-8.7h-3c-.9 3.3-3.8 5.7-7.3 5.7"/><path fill="#435363" d="M12.8 23.2c1.3 1.4 3.1 2.3 5.2 2.3v-3.2c-1.2 0-2.3-.5-3.1-1.3z"/><path fill="#f38b00" d="M10.6 17.8c0 1.1.3 2.2.6 3.1l2.9-1.3c-.3-.5-.4-1.1-.4-1.8 0-2.4 1.9-4.4 4.3-4.4v-3.2c-4.1 0-7.4 3.4-7.4 7.6"/><path fill="#435363" d="m20.6 10.7-1.1 3c.9.4 1.7 1.1 2.2 1.9H25c-.7-2.2-2.3-4-4.4-4.9"/><path fill="#f38b00" d="m19.5 22 1.1 2.9c2.1-.8 3.7-2.6 4.4-4.8h-3.3c-.5.8-1.3 1.5-2.2 1.9"/><path fill="#435363" d="M4.4 22.1c-.1-.2-.1-.3-.1-.5-.1-.2-.1-.3-.2-.5V21c0-.1 0-.3-.1-.4v-.5c-.1-.1-.1-.2-.1-.3-.1-.6-.1-1.3-.1-2H.9c0 .8 0 1.5.1 2.2 0 .2.1.4.1.6v.1c0 .2.1.4.1.5s0 .2.1.3v.3c.1.1.1.2.1.4 0 0 .1.1.1.2 0 .2 0 .3.1.4v.2c.2.7.5 1.3.7 2L5 23.8c-.2-.6-.4-1.1-.6-1.7"/><path fill="#f38b00" d="M18 32.6c-3.9 0-7.5-1.7-10.1-4.4l-2 2.2c3.1 3.2 7.3 5.2 12.1 5.2 8.7 0 15.8-6.8 16.9-15.5H32c-1.1 7-7 12.5-14 12.5M18 3.1c3.1 0 6.1 1.1 8.4 2.9l1.8-2.4C25.3 1.4 21.8.1 18 .1 10.7.1 4.5 4.8 2.1 11.4l2.7 1.1C6.8 7 12 3.1 18 3.1"/><path fill="#435363" d="M32 15.6h2.9c-.3-2.6-1.2-5-2.5-7.2L30 10c1 1.7 1.7 3.6 2 5.6"/><path fill="#f38b00" d="M28.7 15.6h2.9c-.8-5.1-4.1-9.3-8.6-11.1l-1.1 2.8c3.5 1.3 6 4.5 6.8 8.3"/><path fill="#435363" d="M18 6.5v-3c-5.9 0-10.9 3.8-12.9 9.1l2.7 1.1C9.4 9.5 13.3 6.5 18 6.5"/><path fill="#f38b00" d="M7 17.8H4.1c0 6.1 3.6 11.2 8.7 13.4l1.1-2.8C9.9 26.7 7 22.6 7 17.8"/><path fill="#435363" d="M18 29.2v3c6.9 0 12.6-5.3 13.6-12.1h-2.9c-1 5.2-5.4 9.1-10.7 9.1"/></g></svg>

public/assets/img/svg/gitea-debian.svg (generated, 2 changed lines, 4.1 KiB)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 210 260" class="svg gitea-debian" width="16" height="16" aria-hidden="true"><g fill="#D70751"><path d="M124.525 137.053c-4.125.058.78 2.125 6.165 2.954a55 55 0 0 0 4.04-3.479c-3.354.821-6.765.838-10.205.525m22.14-5.52c2.457-3.389 4.246-7.102 4.878-10.939-.551 2.736-2.035 5.099-3.435 7.592-7.711 4.854-.726-2.883-.004-5.824-8.29 10.436-1.138 6.257-1.439 9.171m8.174-21.265c.497-7.428-1.462-5.08-2.121-2.245.766.4 1.377 5.237 2.121 2.245M108.883 8.736c2.201.395 4.757.698 4.398 1.224 2.407-.528 2.954-1.015-4.398-1.224M113.281 9.96l-1.556.32 1.448-.127z"/><path d="M181.93 113.085c.247 6.671-1.95 9.907-3.932 15.637l-3.564 1.781c-2.919 5.666.282 3.598-1.807 8.105-4.556 4.049-13.823 12.67-16.789 13.457-2.163-.047 1.469-2.554 1.943-3.537-6.097 4.188-4.894 6.285-14.217 8.83l-.273-.607c-23.001 10.818-54.947-10.622-54.526-39.876-.246 1.857-.698 1.393-1.208 2.144-1.186-15.052 6.952-30.17 20.675-36.343 13.427-6.646 29.163-3.918 38.78 5.044-5.282-6.92-15.795-14.254-28.255-13.568-12.208.193-23.625 7.95-27.436 16.369-6.253 3.938-6.979 15.177-9.704 17.233-3.665 26.943 6.896 38.583 24.762 52.275 2.812 1.896.792 2.184 1.173 3.627-5.936-2.779-11.372-6.976-15.841-12.114 2.372 3.473 4.931 6.847 8.239 9.499-5.596-1.897-13.074-13.563-15.256-14.038 9.647 17.274 39.142 30.295 54.587 23.836-7.146.263-16.226.146-24.256-2.822-3.371-1.734-7.958-5.331-7.14-6.003 21.079 7.875 42.854 5.965 61.09-8.655 4.641-3.614 9.709-9.761 11.173-9.846-2.206 3.317.377 1.596-1.318 4.523 4.625-7.456-2.008-3.035 4.779-12.877l2.507 3.453c-.931-6.188 7.687-13.704 6.813-23.492 1.975-2.994 2.206 3.22.107 10.107 2.912-7.64.767-8.867 1.516-15.171.81 2.118 1.867 4.37 2.412 6.606-1.895-7.382 1.948-12.433 2.898-16.724-.937-.415-2.928 3.264-3.383-5.457.065-3.788 1.054-1.985 1.435-2.917-.744-.427-2.694-3.33-3.88-8.9.86-1.308 2.3 3.393 3.47 3.586-.753-4.429-2.049-7.805-2.103-11.202-3.421-7.149-1.211.953-3.985-3.069-3.641-11.357 3.021-2.637 3.47-7.796 5.52 7.995 8.667 20.387 10.11 25.519-1.103-6.258-2.883-12.32-5.058-18.185 1.677.705-2.699-12.875 2.18-3.882-5.21-19.172-22.302-37.087-38.025-45.493 1.924 1.76 4.354 3.971 3.481 4.317-7.819-4.656-6.444-5.018-7.565-6.985-6.369-2.591-6.788.208-11.007.004-12.005-6.368-14.318-5.69-25.368-9.681l.502 2.349c-7.953-2.649-9.265 1.005-17.862.009-.523-.409 2.753-1.479 5.452-1.871-7.69 1.015-7.329-1.515-14.854.279 1.855-1.301 3.815-2.162 5.793-3.269-6.271.381-14.971 3.649-12.286.677-10.235 4.569-28.403 10.976-38.597 20.535l-.321-2.142c-4.672 5.608-20.371 16.748-21.622 24.011l-1.249.291c-2.431 4.116-4.004 8.781-5.932 13.016-3.18 5.417-4.661 2.085-4.208 2.934-6.253 12.679-9.359 23.332-12.043 32.069 1.912 2.858.046 17.206.769 28.688-3.141 56.709 39.8 111.77 86.737 124.48 6.88 2.459 17.11 2.364 25.813 2.618-10.268-2.937-11.595-1.556-21.595-5.044-7.215-3.398-8.797-7.277-13.907-11.711l2.022 3.573c-10.021-3.547-5.829-4.39-13.982-6.972l2.16-2.82c-3.249-.246-8.604-5.475-10.069-8.371l-3.553.14c-4.27-5.269-6.545-9.063-6.379-12.005l-1.148 2.047c-1.301-2.235-15.709-19.759-8.234-15.679-1.389-1.271-3.235-2.067-5.237-5.703l1.522-1.739c-3.597-4.627-6.621-10.562-6.391-12.536 1.919 2.592 3.25 3.075 4.568 3.52-9.083-22.539-9.593-1.242-16.474-22.942l1.456-.116c-1.116-1.682-1.793-3.506-2.69-5.298l.633-6.313c-6.541-7.562-1.829-32.151-.887-45.637.655-5.485 5.459-11.322 9.114-20.477l-2.227-.384c4.256-7.423 24.301-29.814 33.583-28.662 4.499-5.649-.892-.02-1.772-1.443 9.878-10.223 12.984-7.222 19.65-9.061 7.19-4.268-6.17 1.664-2.761-1.628 12.427-3.174 8.808-7.216 25.021-8.828 1.71.973-3.969 
1.503-5.395 2.766 10.354-5.066 32.769-3.914 47.326 2.811 16.895 7.896 35.873 31.232 36.622 53.189l.852.229c-.431 8.729 1.336 18.822-1.727 28.094l2.1-4.385"/><path d="m79.5 142.715-.578 2.893c2.71 3.683 4.861 7.673 8.323 10.552-2.49-4.863-4.341-6.872-7.745-13.445m6.409-.251c-1.435-1.587-2.284-3.497-3.235-5.4.909 3.345 2.771 6.219 4.504 9.143zm113.411-24.65-.605 1.52c-1.111 7.892-3.511 15.701-7.189 22.941a72.1 72.1 0 0 0 7.79-24.461M109.698 6.757c2.789-1.022 6.855-.56 9.814-1.233-3.855.324-7.693.517-11.484 1.005zM11.781 58.824c.642 5.951-4.477 8.26 1.134 4.337 3.007-6.773-1.175-1.87-1.134-4.337M5.188 86.362c1.292-3.967 1.526-6.349 2.02-8.645-3.571 4.566-1.643 5.539-2.02 8.645"/></g></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 210 260" class="svg gitea-debian" width="16" height="16" aria-hidden="true"><g fill="#d70751"><path d="M124.525 137.053c-4.125.058.78 2.125 6.165 2.954a55 55 0 0 0 4.04-3.479c-3.354.821-6.765.838-10.205.525m22.14-5.52c2.457-3.389 4.246-7.102 4.878-10.939-.551 2.736-2.035 5.099-3.435 7.592-7.711 4.854-.726-2.883-.004-5.824-8.29 10.436-1.138 6.257-1.439 9.171m8.174-21.265c.497-7.428-1.462-5.08-2.121-2.245.766.4 1.377 5.237 2.121 2.245M108.883 8.736c2.201.395 4.757.698 4.398 1.224 2.407-.528 2.954-1.015-4.398-1.224M113.281 9.96l-1.556.32 1.448-.127z"/><path d="M181.93 113.085c.247 6.671-1.95 9.907-3.932 15.637l-3.564 1.781c-2.919 5.666.282 3.598-1.807 8.105-4.556 4.049-13.823 12.67-16.789 13.457-2.163-.047 1.469-2.554 1.943-3.537-6.097 4.188-4.894 6.285-14.217 8.83l-.273-.607c-23.001 10.818-54.947-10.622-54.526-39.876-.246 1.857-.698 1.393-1.208 2.144-1.186-15.052 6.952-30.17 20.675-36.343 13.427-6.646 29.163-3.918 38.78 5.044-5.282-6.92-15.795-14.254-28.255-13.568-12.208.193-23.625 7.95-27.436 16.369-6.253 3.938-6.979 15.177-9.704 17.233-3.665 26.943 6.896 38.583 24.762 52.275 2.812 1.896.792 2.184 1.173 3.627-5.936-2.779-11.372-6.976-15.841-12.114 2.372 3.473 4.931 6.847 8.239 9.499-5.596-1.897-13.074-13.563-15.256-14.038 9.647 17.274 39.142 30.295 54.587 23.836-7.146.263-16.226.146-24.256-2.822-3.371-1.734-7.958-5.331-7.14-6.003 21.079 7.875 42.854 5.965 61.09-8.655 4.641-3.614 9.709-9.761 11.173-9.846-2.206 3.317.377 1.596-1.318 4.523 4.625-7.456-2.008-3.035 4.779-12.877l2.507 3.453c-.931-6.188 7.687-13.704 6.813-23.492 1.975-2.994 2.206 3.22.107 10.107 2.912-7.64.767-8.867 1.516-15.171.81 2.118 1.867 4.37 2.412 6.606-1.895-7.382 1.948-12.433 2.898-16.724-.937-.415-2.928 3.264-3.383-5.457.065-3.788 1.054-1.985 1.435-2.917-.744-.427-2.694-3.33-3.88-8.9.86-1.308 2.3 3.393 3.47 3.586-.753-4.429-2.049-7.805-2.103-11.202-3.421-7.149-1.211.953-3.985-3.069-3.641-11.357 3.021-2.637 3.47-7.796 5.52 7.995 8.667 20.387 10.11 25.519-1.103-6.258-2.883-12.32-5.058-18.185 1.677.705-2.699-12.875 2.18-3.882-5.21-19.172-22.302-37.087-38.025-45.493 1.924 1.76 4.354 3.971 3.481 4.317-7.819-4.656-6.444-5.018-7.565-6.985-6.369-2.591-6.788.208-11.007.004-12.005-6.368-14.318-5.69-25.368-9.681l.502 2.349c-7.953-2.649-9.265 1.005-17.862.009-.523-.409 2.753-1.479 5.452-1.871-7.69 1.015-7.329-1.515-14.854.279 1.855-1.301 3.815-2.162 5.793-3.269-6.271.381-14.971 3.649-12.286.677-10.235 4.569-28.403 10.976-38.597 20.535l-.321-2.142c-4.672 5.608-20.371 16.748-21.622 24.011l-1.249.291c-2.431 4.116-4.004 8.781-5.932 13.016-3.18 5.417-4.661 2.085-4.208 2.934-6.253 12.679-9.359 23.332-12.043 32.069 1.912 2.858.046 17.206.769 28.688-3.141 56.709 39.8 111.77 86.737 124.48 6.88 2.459 17.11 2.364 25.813 2.618-10.268-2.937-11.595-1.556-21.595-5.044-7.215-3.398-8.797-7.277-13.907-11.711l2.022 3.573c-10.021-3.547-5.829-4.39-13.982-6.972l2.16-2.82c-3.249-.246-8.604-5.475-10.069-8.371l-3.553.14c-4.27-5.269-6.545-9.063-6.379-12.005l-1.148 2.047c-1.301-2.235-15.709-19.759-8.234-15.679-1.389-1.271-3.235-2.067-5.237-5.703l1.522-1.739c-3.597-4.627-6.621-10.562-6.391-12.536 1.919 2.592 3.25 3.075 4.568 3.52-9.083-22.539-9.593-1.242-16.474-22.942l1.456-.116c-1.116-1.682-1.793-3.506-2.69-5.298l.633-6.313c-6.541-7.562-1.829-32.151-.887-45.637.655-5.485 5.459-11.322 9.114-20.477l-2.227-.384c4.256-7.423 24.301-29.814 33.583-28.662 4.499-5.649-.892-.02-1.772-1.443 9.878-10.223 12.984-7.222 19.65-9.061 7.19-4.268-6.17 1.664-2.761-1.628 12.427-3.174 8.808-7.216 25.021-8.828 1.71.973-3.969 
1.503-5.395 2.766 10.354-5.066 32.769-3.914 47.326 2.811 16.895 7.896 35.873 31.232 36.622 53.189l.852.229c-.431 8.729 1.336 18.822-1.727 28.094l2.1-4.385"/><path d="m79.5 142.715-.578 2.893c2.71 3.683 4.861 7.673 8.323 10.552-2.49-4.863-4.341-6.872-7.745-13.445m6.409-.251c-1.435-1.587-2.284-3.497-3.235-5.4.909 3.345 2.771 6.219 4.504 9.143zm113.411-24.65-.605 1.52c-1.111 7.892-3.511 15.701-7.189 22.941a72.1 72.1 0 0 0 7.79-24.461M109.698 6.757c2.789-1.022 6.855-.56 9.814-1.233-3.855.324-7.693.517-11.484 1.005zM11.781 58.824c.642 5.951-4.477 8.26 1.134 4.337 3.007-6.773-1.175-1.87-1.134-4.337M5.188 86.362c1.292-3.967 1.526-6.349 2.02-8.645-3.571 4.566-1.643 5.539-2.02 8.645"/></g></svg>

public/assets/img/svg/gitea-gitbucket.svg (generated, 2 changed lines, 1.8 KiB)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.0" viewBox="0 0 316 329" class="svg gitea-gitbucket" width="16" height="16" aria-hidden="true"><path d="M123 21.1c-44.8 2.8-84 12.8-97.1 24.6-5 4.5-5 7.1 0 11.6C41.7 71.5 96.6 82.9 150 83h10.6l5.4-5.6c11.8-12.1 21.3-12.4 32.6-1.2l5.2 5 13.3-1.7c33.8-4.2 61.5-12.7 71.8-22 5.3-4.8 5.3-7.2 0-12-10.1-9.1-39.1-18.1-70.4-21.9-28.3-3.4-65.6-4.4-95.5-2.5M23.2 80.6c.4 1.6 7 42.9 14.8 91.9 7.9 49 14.7 89.5 15.2 90.2 1.7 2.1 25.8 11.4 41.6 15.9 13 3.7 35.1 8.4 40 8.4.6 0 1.2-.6 1.2-1.3 0-.6-17.4-18.5-38.6-39.7C57.9 206.6 55 203.2 55 196s3-10.7 38.3-45.9c30.1-30 34.8-34.3 36.6-33.5 1.1.5 8.7 7.4 17 15.2l15.1 14.4v5.9c0 7.3 2.4 12.4 7.7 16.7l3.8 3v61.8l-3.8 3.4c-10.2 8.9-10.2 22.9-.1 30.7 3.1 2.3 4.9 2.8 10.8 3.1 8.2.4 11.5-1.1 16.2-7.3 2.2-3 2.9-5.1 3.2-10 .4-6.5-.2-8.3-5.3-15.4l-2.5-3.4v-26.6c0-26.7.3-31.1 2.3-31.1.5 0 5.4 4.4 10.9 9.7 9.6 9.5 9.9 10 10.8 15.7 1.7 10.3 8.9 16.6 19 16.6 7.6 0 13.5-3.9 17.4-11.7 3.2-6.4 1.6-14.3-4.3-20.6-4.1-4.4-7.3-5.7-14.9-5.7h-6.8l-12.7-12.1c-10.7-10.1-12.7-12.6-13.2-15.7-1.2-7.2-1.6-8.2-4.7-11.7-3.9-4.5-7.7-6.5-12.2-6.5-1.9 0-4.5-.4-5.8-.9s-9.9-8.2-19.3-17l-17-16-7.1-.1c-10.6 0-36-2.7-52.4-5.5-22.8-4-38.5-8.6-57.9-17.2-1.1-.5-1.3 0-.9 2.3M278.5 83.6c-8.6 3.6-28 8.8-42.5 11.4-6.9 1.2-12.9 2.6-13.5 3.1-.6.6 9.3 11.2 27.5 29.4 15.6 15.6 28.5 28.3 28.7 28.1s1.9-15.8 3.8-34.7 3.7-35.6 4-37.2c.6-3.4-.2-3.4-8-.1M255.2 259.3c-7.8 7.8-14.2 14.6-14.2 15s.7.7 1.6.7c2.2 0 23-8.9 24.2-10.3.9-1.1 3.5-18.7 2.9-19.3-.2-.2-6.7 6.1-14.5 13.9M56 283.5c0 3.4 4 9.5 8.4 12.9 6.1 4.6 19.7 10.4 31.7 13.5 16.9 4.3 32.1 6.2 53.4 6.8l19 .5-7-7.1c-6.8-6.9-7.1-7.1-12-7.1-18.9 0-55.1-7.9-80.6-17.6C62.5 283 57 281 56.6 281c-.3 0-.6 1.1-.6 2.5M262 283.4c-5.3 2.8-25 9.7-36 12.6l-11.4 2.9-7.8 7.8c-4.2 4.2-7.6 7.9-7.4 8.1.9.8 24.4-4.1 33.4-6.9 16.4-5.3 26.7-11.4 30.8-18.5 2.4-4 3.1-7.4 1.7-7.4-.5.1-1.9.7-3.3 1.4"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 316 329" class="svg gitea-gitbucket" width="16" height="16" aria-hidden="true"><path d="M123 21.1c-44.8 2.8-84 12.8-97.1 24.6-5 4.5-5 7.1 0 11.6C41.7 71.5 96.6 82.9 150 83h10.6l5.4-5.6c11.8-12.1 21.3-12.4 32.6-1.2l5.2 5 13.3-1.7c33.8-4.2 61.5-12.7 71.8-22 5.3-4.8 5.3-7.2 0-12-10.1-9.1-39.1-18.1-70.4-21.9-28.3-3.4-65.6-4.4-95.5-2.5M23.2 80.6c.4 1.6 7 42.9 14.8 91.9 7.9 49 14.7 89.5 15.2 90.2 1.7 2.1 25.8 11.4 41.6 15.9 13 3.7 35.1 8.4 40 8.4.6 0 1.2-.6 1.2-1.3 0-.6-17.4-18.5-38.6-39.7C57.9 206.6 55 203.2 55 196s3-10.7 38.3-45.9c30.1-30 34.8-34.3 36.6-33.5 1.1.5 8.7 7.4 17 15.2l15.1 14.4v5.9c0 7.3 2.4 12.4 7.7 16.7l3.8 3v61.8l-3.8 3.4c-10.2 8.9-10.2 22.9-.1 30.7 3.1 2.3 4.9 2.8 10.8 3.1 8.2.4 11.5-1.1 16.2-7.3 2.2-3 2.9-5.1 3.2-10 .4-6.5-.2-8.3-5.3-15.4l-2.5-3.4v-26.6c0-26.7.3-31.1 2.3-31.1.5 0 5.4 4.4 10.9 9.7 9.6 9.5 9.9 10 10.8 15.7 1.7 10.3 8.9 16.6 19 16.6 7.6 0 13.5-3.9 17.4-11.7 3.2-6.4 1.6-14.3-4.3-20.6-4.1-4.4-7.3-5.7-14.9-5.7h-6.8l-12.7-12.1c-10.7-10.1-12.7-12.6-13.2-15.7-1.2-7.2-1.6-8.2-4.7-11.7-3.9-4.5-7.7-6.5-12.2-6.5-1.9 0-4.5-.4-5.8-.9s-9.9-8.2-19.3-17l-17-16-7.1-.1c-10.6 0-36-2.7-52.4-5.5-22.8-4-38.5-8.6-57.9-17.2-1.1-.5-1.3 0-.9 2.3M278.5 83.6c-8.6 3.6-28 8.8-42.5 11.4-6.9 1.2-12.9 2.6-13.5 3.1-.6.6 9.3 11.2 27.5 29.4 15.6 15.6 28.5 28.3 28.7 28.1s1.9-15.8 3.8-34.7 3.7-35.6 4-37.2c.6-3.4-.2-3.4-8-.1M255.2 259.3c-7.8 7.8-14.2 14.6-14.2 15s.7.7 1.6.7c2.2 0 23-8.9 24.2-10.3.9-1.1 3.5-18.7 2.9-19.3-.2-.2-6.7 6.1-14.5 13.9M56 283.5c0 3.4 4 9.5 8.4 12.9 6.1 4.6 19.7 10.4 31.7 13.5 16.9 4.3 32.1 6.2 53.4 6.8l19 .5-7-7.1c-6.8-6.9-7.1-7.1-12-7.1-18.9 0-55.1-7.9-80.6-17.6C62.5 283 57 281 56.6 281c-.3 0-.6 1.1-.6 2.5M262 283.4c-5.3 2.8-25 9.7-36 12.6l-11.4 2.9-7.8 7.8c-4.2 4.2-7.6 7.9-7.4 8.1.9.8 24.4-4.1 33.4-6.9 16.4-5.3 26.7-11.4 30.8-18.5 2.4-4 3.1-7.4 1.7-7.4-.5.1-1.9.7-3.3 1.4"/></svg>

public/assets/img/svg/gitea-gitlab.svg (generated, 2 changed lines, 988 B)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" class="svg gitea-gitlab" width="16" height="16" aria-hidden="true"><path fill="#E24329" d="m31.462 12.779-.045-.115-4.35-11.35a1.14 1.14 0 0 0-.447-.541 1.16 1.16 0 0 0-1.343.071c-.187.15-.322.356-.386.587l-2.94 9.001h-11.9l-2.941-9a1.14 1.14 0 0 0-1.045-.84 1.15 1.15 0 0 0-1.13.72L.579 12.68l-.045.113a8.09 8.09 0 0 0 2.68 9.34l.016.012.038.03 6.635 4.967 3.28 2.484 1.994 1.51a1.35 1.35 0 0 0 1.627 0l1.994-1.51 3.282-2.484 6.673-4.997.018-.013a8.09 8.09 0 0 0 2.69-9.352Z"/><path fill="#FC6D26" d="m31.462 12.779-.045-.115a14.75 14.75 0 0 0-5.856 2.634l-9.553 7.24L22.1 27.14l6.673-4.997.019-.013a8.09 8.09 0 0 0 2.67-9.352Z"/><path fill="#FCA326" d="m9.908 27.14 3.275 2.485 1.994 1.51a1.35 1.35 0 0 0 1.627 0l1.994-1.51 3.282-2.484s-2.835-2.14-6.092-4.603z"/><path fill="#FC6D26" d="M6.435 15.305A14.7 14.7 0 0 0 .58 12.672l-.045.113a8.09 8.09 0 0 0 2.68 9.347l.016.012.038.03 6.635 4.967 6.105-4.603-9.573-7.233Z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 32 32" class="svg gitea-gitlab" width="16" height="16" aria-hidden="true"><path fill="#e24329" d="m31.462 12.779-.045-.115-4.35-11.35a1.14 1.14 0 0 0-.447-.541 1.16 1.16 0 0 0-1.343.071c-.187.15-.322.356-.386.587l-2.94 9.001h-11.9l-2.941-9a1.14 1.14 0 0 0-1.045-.84 1.15 1.15 0 0 0-1.13.72L.579 12.68l-.045.113a8.09 8.09 0 0 0 2.68 9.34l.016.012.038.03 6.635 4.967 3.28 2.484 1.994 1.51a1.35 1.35 0 0 0 1.627 0l1.994-1.51 3.282-2.484 6.673-4.997.018-.013a8.09 8.09 0 0 0 2.69-9.352Z"/><path fill="#fc6d26" d="m31.462 12.779-.045-.115a14.75 14.75 0 0 0-5.856 2.634l-9.553 7.24L22.1 27.14l6.673-4.997.019-.013a8.09 8.09 0 0 0 2.67-9.352Z"/><path fill="#fca326" d="m9.908 27.14 3.275 2.485 1.994 1.51a1.35 1.35 0 0 0 1.627 0l1.994-1.51 3.282-2.484s-2.835-2.14-6.092-4.603z"/><path fill="#fc6d26" d="M6.435 15.305A14.7 14.7 0 0 0 .58 12.672l-.045.113a8.09 8.09 0 0 0 2.68 9.347l.016.012.038.03 6.635 4.967 6.105-4.603-9.573-7.233Z"/></svg>

public/assets/img/svg/gitea-google.svg (generated, 2 changed lines, 821 B)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="gitea-google__svg gitea-google__gitea-google svg gitea-google" viewBox="0 0 24 24" width="16" height="16"><path fill="#4285F4" d="M22.56 12.25c0-.78-.07-1.53-.2-2.25H12v4.26h5.92c-.26 1.37-1.04 2.53-2.21 3.31v2.77h3.57c2.08-1.92 3.28-4.74 3.28-8.09"/><path fill="#34A853" d="M12 23c2.97 0 5.46-.98 7.28-2.66l-3.57-2.77c-.98.66-2.23 1.06-3.71 1.06-2.86 0-5.29-1.93-6.16-4.53H2.18v2.84C3.99 20.53 7.7 23 12 23"/><path fill="#FBBC05" d="M5.84 14.09c-.22-.66-.35-1.36-.35-2.09s.13-1.43.35-2.09V7.07H2.18C1.43 8.55 1 10.22 1 12s.43 3.45 1.18 4.93l2.85-2.22z"/><path fill="#EA4335" d="M12 5.38c1.62 0 3.06.56 4.21 1.64l3.15-3.15C17.45 2.09 14.97 1 12 1 7.7 1 3.99 3.47 2.18 7.07l3.66 2.84c.87-2.6 3.3-4.53 6.16-4.53"/><path fill="none" d="M1 1h22v22H1z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" aria-hidden="true" class="gitea-google__svg gitea-google__gitea-google svg gitea-google" viewBox="0 0 24 24" width="16" height="16"><path fill="#4285f4" d="M22.56 12.25c0-.78-.07-1.53-.2-2.25H12v4.26h5.92c-.26 1.37-1.04 2.53-2.21 3.31v2.77h3.57c2.08-1.92 3.28-4.74 3.28-8.09"/><path fill="#34a853" d="M12 23c2.97 0 5.46-.98 7.28-2.66l-3.57-2.77c-.98.66-2.23 1.06-3.71 1.06-2.86 0-5.29-1.93-6.16-4.53H2.18v2.84C3.99 20.53 7.7 23 12 23"/><path fill="#fbbc05" d="M5.84 14.09c-.22-.66-.35-1.36-.35-2.09s.13-1.43.35-2.09V7.07H2.18C1.43 8.55 1 10.22 1 12s.43 3.45 1.18 4.93l2.85-2.22z"/><path fill="#ea4335" d="M12 5.38c1.62 0 3.06.56 4.21 1.64l3.15-3.15C17.45 2.09 14.97 1 12 1 7.7 1 3.99 3.47 2.18 7.07l3.66 2.84c.87-2.6 3.3-4.53 6.16-4.53"/><path fill="none" d="M1 1h22v22H1z"/></svg>

public/assets/img/svg/gitea-maven.svg (generated, 2 changed lines, 9.8 KiB before and after; image diff not shown)

public/assets/img/svg/gitea-microsoftonline.svg (generated, 2 changed lines, 3.6 KiB before / 3.5 KiB after)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 48 48" class="svg gitea-microsoftonline" width="16" height="16" aria-hidden="true"><path fill="url(#gitea-microsoftonline__a)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__b)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__c)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__d)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__e)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__f)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__g)" d="M4.004 30.998"/><path fill="url(#gitea-microsoftonline__h)" d="M4.004 30.998"/><defs><radialGradient id="gitea-microsoftonline__a" cx="0" cy="0" r="1" gradientTransform="rotate(110.528 5.021 11.358)scale(33.3657 58.1966)" gradientUnits="userSpaceOnUse"><stop offset=".064" stop-color="#AE7FE2"/><stop offset="1" stop-color="#0078D4"/></radialGradient><radialGradient id="gitea-microsoftonline__c" cx="0" cy="0" r="1" gradientTransform="matrix(30.7198 -4.51832 2.98465 20.29248 10.43 36.351)" gradientUnits="userSpaceOnUse"><stop offset=".134" stop-color="#D59DFF"/><stop offset="1" stop-color="#5E438F"/></radialGradient><radialGradient id="gitea-microsoftonline__e" cx="0" cy="0" r="1" gradientTransform="matrix(-24.1583 -6.12555 10.3118 -40.66824 41.055 26.504)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50E6FF"/><stop offset="1" stop-color="#436DCD"/></radialGradient><radialGradient id="gitea-microsoftonline__g" cx="0" cy="0" r="1" gradientTransform="matrix(-24.1583 -6.12555 10.3118 -40.66824 41.055 26.504)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50E6FF"/><stop offset="1" stop-color="#436DCD"/></radialGradient><linearGradient id="gitea-microsoftonline__b" x1="17.512" x2="12.751" y1="37.868" y2="29.635" gradientUnits="userSpaceOnUse"><stop stop-color="#114A8B"/><stop offset="1" stop-color="#0078D4" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__d" x1="40.357" x2="35.255" y1="25.377" y2="32.692" gradientUnits="userSpaceOnUse"><stop stop-color="#493474"/><stop offset="1" stop-color="#8C66BA" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__f" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop stop-color="#2D3F80"/><stop offset="1" stop-color="#436DCD" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__h" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop 
stop-color="#2D3F80"/><stop offset="1" stop-color="#436DCD" stop-opacity="0"/></linearGradient></defs></svg>
<svg xmlns="http://www.w3.org/2000/svg" fill="none" viewBox="0 0 48 48" class="svg gitea-microsoftonline" width="16" height="16" aria-hidden="true"><path fill="url(#gitea-microsoftonline__a)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__b)" d="m20.084 3.026-.224.136a8 8 0 0 0-1.009.722l.648-.456H25L26 11l-5 5-5 3.475v4.008a8 8 0 0 0 3.857 6.844l5.264 3.186L14 40h-2.145l-3.998-2.42A8 8 0 0 1 4 30.737V17.26a8 8 0 0 1 3.86-6.846l12-7.258q.111-.068.224-.131Z"/><path fill="url(#gitea-microsoftonline__c)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__d)" d="M32 19v4.48a8 8 0 0 1-3.857 6.844l-12 7.264a8 8 0 0 1-8.008.16l11.722 7.096a8 8 0 0 0 8.286 0l12-7.264A8 8 0 0 0 44 30.736V27.5L43 26z"/><path fill="url(#gitea-microsoftonline__e)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__f)" d="m40.14 10.415-12-7.258a8 8 0 0 0-8.042-.139l-.238.144A8 8 0 0 0 16 10.008v9.483l3.86-2.334a8 8 0 0 1 8.28 0l12 7.258A8 8 0 0 1 43.997 31q.004-.132.004-.263V17.26a8 8 0 0 0-3.86-6.845Z"/><path fill="url(#gitea-microsoftonline__g)" d="M4.004 30.998"/><path fill="url(#gitea-microsoftonline__h)" d="M4.004 30.998"/><defs><radialGradient id="gitea-microsoftonline__a" cx="0" cy="0" r="1" gradientTransform="rotate(110.528 5.021 11.358)scale(33.3657 58.1966)" gradientUnits="userSpaceOnUse"><stop offset=".064" stop-color="#ae7fe2"/><stop offset="1" stop-color="#0078d4"/></radialGradient><radialGradient id="gitea-microsoftonline__c" cx="0" cy="0" r="1" gradientTransform="rotate(-8.367 253.693 -53.118)scale(31.0503 20.5108)" gradientUnits="userSpaceOnUse"><stop offset=".134" stop-color="#d59dff"/><stop offset="1" stop-color="#5e438f"/></radialGradient><radialGradient id="gitea-microsoftonline__e" cx="0" cy="0" r="1" gradientTransform="rotate(194.228 22.182 10.69)scale(24.9228 41.9552)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50e6ff"/><stop offset="1" stop-color="#436dcd"/></radialGradient><radialGradient id="gitea-microsoftonline__g" cx="0" cy="0" r="1" gradientTransform="rotate(194.228 22.182 10.69)scale(24.9228 41.9552)" gradientUnits="userSpaceOnUse"><stop offset=".058" stop-color="#50e6ff"/><stop offset="1" stop-color="#436dcd"/></radialGradient><linearGradient id="gitea-microsoftonline__b" x1="17.512" x2="12.751" y1="37.868" y2="29.635" gradientUnits="userSpaceOnUse"><stop stop-color="#114a8b"/><stop offset="1" stop-color="#0078d4" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__d" x1="40.357" x2="35.255" y1="25.377" y2="32.692" gradientUnits="userSpaceOnUse"><stop stop-color="#493474"/><stop offset="1" stop-color="#8c66ba" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__f" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop stop-color="#2d3f80"/><stop offset="1" stop-color="#436dcd" stop-opacity="0"/></linearGradient><linearGradient id="gitea-microsoftonline__h" x1="16.976" x2="24.487" y1="3.057" y2="3.057" gradientUnits="userSpaceOnUse"><stop stop-color="#2d3f80"/><stop offset="1" 
stop-color="#436dcd" stop-opacity="0"/></linearGradient></defs></svg>

public/assets/img/svg/gitea-npm.svg (generated, 2 changed lines, 345 B)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 7" class="svg gitea-npm" width="16" height="16" aria-hidden="true"><path fill="#CB3837" d="M0 0h18v6H9v1H5V6H0zm1 5h2V2h1v3h1V1H1zm5-4v5h2V5h2V1zm2 1h1v2H8zm3-1v4h2V2h1v3h1V2h1v3h1V1z"/><path fill="#fff" d="M1 5h2V2h1v3h1V1H1zM6 1v5h2V5h2V1zm3 3H8V2h1zM11 1v4h2V2h1v3h1V2h1v3h1V1z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 18 7" class="svg gitea-npm" width="16" height="16" aria-hidden="true"><path fill="#cb3837" d="M0 0h18v6H9v1H5V6H0zm1 5h2V2h1v3h1V1H1zm5-4v5h2V5h2V1zm2 1h1v2H8zm3-1v4h2V2h1v3h1V2h1v3h1V1z"/><path fill="#fff" d="M1 5h2V2h1v3h1V1H1zM6 1v5h2V5h2V1zm3 3H8V2h1zM11 1v4h2V2h1v3h1V2h1v3h1V1z"/></svg>

public/assets/img/svg/gitea-onedev.svg (generated, 2 changed lines, 2.3 KiB before / 2.2 KiB after)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.0" viewBox="0 0 700 700" class="svg gitea-onedev" width="16" height="16" aria-hidden="true"><path d="M315.5 99.6c-29.5 4-55.8 12-81.2 24.8L223 130l-5.2-4c-14.9-11.3-37.6-14.9-55.8-9-19.1 6.3-35.1 22.2-41.1 41-2.7 8.3-3.6 22.9-1.9 31.2 1.5 8 5 16.5 9.1 22.5 3.1 4.7 3.1 4.8 1.4 7.8C106 260 95.1 294.4 92 337.7c-1.1 15.7-.1 40.2 2.1 53l1.1 6.5-4.9 4.4c-2.8 2.3-7.5 7.6-10.6 11.6-19.4 25.5-24.7 57.9-14.4 88.3 9.2 26.9 31.2 48.8 58.4 58.1 20.6 6.9 40.6 7 61.1.1l6.7-2.2 10.5 7.1c45.6 31 92 45.5 146 45.5 33 0 61.6-5.2 91-16.4 67.6-25.8 122.9-81.1 148.4-148.4l2.7-7.2 7.7-3.8c9.1-4.5 21.1-15.7 25.9-24.3 21.1-37.5-1-84.3-43.2-91.7-19.9-3.5-39.3 2.7-53.9 17.2-7.1 7.1-11.7 14.5-15.3 24.7-3.4 9.4-3.8 25.8-.9 35.3 2.8 9.5 8.5 19.3 15.3 26.4 7.2 7.6 7.2 6 0 20.5-8.9 18.1-20.3 34.1-35.2 49.5-34.6 35.7-78.2 56.3-128.3 60.3-42.8 3.4-89.3-8.9-125-33-1.1-.8-1-1.7.8-5.2 12.1-23.6 13.5-53.7 3.9-78-8.7-21.8-27.5-41.6-48.6-51.2-9-4.1-22.7-7.4-34-8.3l-9.1-.7-.8-9.6c-3.5-46.9 13.5-99.8 45.5-141.7 6.5-8.6 24.3-26.7 33.6-34.2 43.8-35.6 101.3-52.8 158.1-47.2 39.9 3.9 79 19.1 110.6 43 16.9 12.8 37.5 34.9 48.6 52l4.3 6.7-3.3 5.2c-2.9 4.7-3.3 6.3-3.6 13.4-.3 7.3-.1 8.6 2.5 13.6 3.2 6.1 10.2 12 16.3 13.9 22.8 6.8 43-16.9 32.6-38.2-3.1-6.4-9.3-12.2-14.7-13.8-2.5-.8-4.1-2.1-5.2-4.3-.9-1.7-3.2-5.8-5.1-9.2l-3.5-6 3.6-5c17.7-24.4 15.8-57.5-4.4-79.4-8-8.6-15.5-13.6-25.9-17.2-19.8-6.8-38.9-4.2-56.5 7.8l-7.8 5.3-15.3-7.4c-27.9-13.4-55-21.3-84-24.4-13.3-1.5-48.1-1.2-60.3.5"/><path d="M271.8 271.1c-13.9 2.1-30.5 17.3-40.5 37.4-18.3 36.4-13.4 81.5 9.8 91.5 15.2 6.5 34.5-2.7 48.6-23.2 5.5-8 9.7-15.7 9-16.5-.3-.2-2 .3-3.8 1.2-2.4 1.3-5.1 1.6-10.5 1.3-6.1-.3-7.9-.8-11.6-3.4-8.9-6.2-12.4-19.1-7.9-29 2.4-5.2 9-10.8 14.7-12.4 9.1-2.6 20 1.4 25.2 9.2l2.7 4.2.3-12.4c.4-18.9-3.4-31.6-12.4-40.5-6.3-6.3-14.2-8.8-23.6-7.4M420.5 271c-11.6 1.9-20.2 11.3-24.9 27-2.1 6.9-3.1 20-2.2 27.4l.8 5.7 2.1-3.2c10.2-15 31.6-14 39.9 2 6 11.5 1.5 25.1-10.4 31.2-5 2.5-15 2.6-20 .1l-3.6-1.9 1.4 3.3c6.1 14.5 20 30.1 32.3 36.1 5.7 2.8 14.4 4 20.4 2.9 5.2-1 12.1-6.1 16.1-11.9 18.1-26.4 8.1-79-20-105.8-10.8-10.2-21.6-14.6-31.9-12.9M322.5 431.9c-16.1 1.6-23.5 6.1-23.5 14.3 0 11.4 13 21.1 34 25.4 10.2 2 31.2 1.5 40.5-1 13.5-3.7 23.8-10.3 27.6-17.7 4.9-9.7-.2-17.1-13.8-20-6.1-1.2-54.2-2-64.8-1"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 700 700" class="svg gitea-onedev" width="16" height="16" aria-hidden="true"><path d="M315.5 99.6c-29.5 4-55.8 12-81.2 24.8L223 130l-5.2-4c-14.9-11.3-37.6-14.9-55.8-9-19.1 6.3-35.1 22.2-41.1 41-2.7 8.3-3.6 22.9-1.9 31.2 1.5 8 5 16.5 9.1 22.5 3.1 4.7 3.1 4.8 1.4 7.8C106 260 95.1 294.4 92 337.7c-1.1 15.7-.1 40.2 2.1 53l1.1 6.5-4.9 4.4c-2.8 2.3-7.5 7.6-10.6 11.6-19.4 25.5-24.7 57.9-14.4 88.3 9.2 26.9 31.2 48.8 58.4 58.1 20.6 6.9 40.6 7 61.1.1l6.7-2.2 10.5 7.1c45.6 31 92 45.5 146 45.5 33 0 61.6-5.2 91-16.4 67.6-25.8 122.9-81.1 148.4-148.4l2.7-7.2 7.7-3.8c9.1-4.5 21.1-15.7 25.9-24.3 21.1-37.5-1-84.3-43.2-91.7-19.9-3.5-39.3 2.7-53.9 17.2-7.1 7.1-11.7 14.5-15.3 24.7-3.4 9.4-3.8 25.8-.9 35.3 2.8 9.5 8.5 19.3 15.3 26.4 7.2 7.6 7.2 6 0 20.5-8.9 18.1-20.3 34.1-35.2 49.5-34.6 35.7-78.2 56.3-128.3 60.3-42.8 3.4-89.3-8.9-125-33-1.1-.8-1-1.7.8-5.2 12.1-23.6 13.5-53.7 3.9-78-8.7-21.8-27.5-41.6-48.6-51.2-9-4.1-22.7-7.4-34-8.3l-9.1-.7-.8-9.6c-3.5-46.9 13.5-99.8 45.5-141.7 6.5-8.6 24.3-26.7 33.6-34.2 43.8-35.6 101.3-52.8 158.1-47.2 39.9 3.9 79 19.1 110.6 43 16.9 12.8 37.5 34.9 48.6 52l4.3 6.7-3.3 5.2c-2.9 4.7-3.3 6.3-3.6 13.4-.3 7.3-.1 8.6 2.5 13.6 3.2 6.1 10.2 12 16.3 13.9 22.8 6.8 43-16.9 32.6-38.2-3.1-6.4-9.3-12.2-14.7-13.8-2.5-.8-4.1-2.1-5.2-4.3-.9-1.7-3.2-5.8-5.1-9.2l-3.5-6 3.6-5c17.7-24.4 15.8-57.5-4.4-79.4-8-8.6-15.5-13.6-25.9-17.2-19.8-6.8-38.9-4.2-56.5 7.8l-7.8 5.3-15.3-7.4c-27.9-13.4-55-21.3-84-24.4-13.3-1.5-48.1-1.2-60.3.5"/><path d="M271.8 271.1c-13.9 2.1-30.5 17.3-40.5 37.4-18.3 36.4-13.4 81.5 9.8 91.5 15.2 6.5 34.5-2.7 48.6-23.2 5.5-8 9.7-15.7 9-16.5-.3-.2-2 .3-3.8 1.2-2.4 1.3-5.1 1.6-10.5 1.3-6.1-.3-7.9-.8-11.6-3.4-8.9-6.2-12.4-19.1-7.9-29 2.4-5.2 9-10.8 14.7-12.4 9.1-2.6 20 1.4 25.2 9.2l2.7 4.2.3-12.4c.4-18.9-3.4-31.6-12.4-40.5-6.3-6.3-14.2-8.8-23.6-7.4M420.5 271c-11.6 1.9-20.2 11.3-24.9 27-2.1 6.9-3.1 20-2.2 27.4l.8 5.7 2.1-3.2c10.2-15 31.6-14 39.9 2 6 11.5 1.5 25.1-10.4 31.2-5 2.5-15 2.6-20 .1l-3.6-1.9 1.4 3.3c6.1 14.5 20 30.1 32.3 36.1 5.7 2.8 14.4 4 20.4 2.9 5.2-1 12.1-6.1 16.1-11.9 18.1-26.4 8.1-79-20-105.8-10.8-10.2-21.6-14.6-31.9-12.9M322.5 431.9c-16.1 1.6-23.5 6.1-23.5 14.3 0 11.4 13 21.1 34 25.4 10.2 2 31.2 1.5 40.5-1 13.5-3.7 23.8-10.3 27.6-17.7 4.9-9.7-.2-17.1-13.8-20-6.1-1.2-54.2-2-64.8-1"/></svg>

public/assets/img/svg/gitea-openid.svg (generated, 2 changed lines, 1.2 KiB)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" version="1.0" viewBox="0 0 2400 2400" class="svg gitea-openid" width="16" height="16" aria-hidden="true"><path fill="#ff7c00" d="m1270 218.3-173.1 84.3-.7 981.8c-.3 540 .2 981.4 1.1 981l174.5-81.8 172.3-80.8v-984.4c0-541.7-.2-984.7-.4-984.4z"/><path fill="#aaa" d="M981.9 785.5c-425.3 63.2-766.5 264.1-889 523a491.5 491.5 0 0 0-43.6 146c-4.2 29.2-4.7 95-1.2 124 19 152.6 115.2 299.9 273.2 418.8 147.7 111 350.5 196.5 568.6 239.7 59 11.6 179 29 200.5 29 2.3 0 3-23.2 3-109.1v-109.2l-5.1-1-37.9-6a1182 1182 0 0 1-305.4-90.6c-122.2-55.7-225.1-137.7-284.6-226.4-107.5-160.5-81.3-344.3 70-491.3 57-55.5 115.4-95.2 199.5-136.1a1112.6 1112.6 0 0 1 269.4-89.2l29.7-6c3.7-1.2 4-8.6 4-111.5V779.5l-6.3.2a823 823 0 0 0-44.8 5.8m525 104c0 103 .2 110.4 4.1 111.6l29.5 6a1221.6 1221.6 0 0 1 207.7 61.3A1088 1088 0 0 1 1862 1123c4.6 3.7 1.4 5.8-88 56-51.1 28.5-93 52.7-93 53.4 0 1.9 671.6 146.8 673.2 145.2 1.2-1.2-45.5-496-47-497.6-.2-.2-38.5 21-85 47.2l-89.6 50.2c-4.2 2-8.8.2-27.9-10.7-130.8-75-289.6-132.2-460.8-166.1a1871 1871 0 0 0-132.9-21.1c-4 0-4.2 6.7-4.2 110z"/><path fill="#cbaa7c" d="M1094.5 2156.9c0 60.6.3 85.5.5 55 .5-30.2.5-79.9 0-110.3-.2-30.2-.5-5.3-.5 55.3"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" viewBox="0 0 2400 2400" class="svg gitea-openid" width="16" height="16" aria-hidden="true"><path fill="#ff7c00" d="m1270 218.3-173.1 84.3-.7 981.8c-.3 540 .2 981.4 1.1 981l174.5-81.8 172.3-80.8v-984.4c0-541.7-.2-984.7-.4-984.4z"/><path fill="#aaa" d="M981.9 785.5c-425.3 63.2-766.5 264.1-889 523a491.5 491.5 0 0 0-43.6 146c-4.2 29.2-4.7 95-1.2 124 19 152.6 115.2 299.9 273.2 418.8 147.7 111 350.5 196.5 568.6 239.7 59 11.6 179 29 200.5 29 2.3 0 3-23.2 3-109.1v-109.2l-5.1-1-37.9-6a1182 1182 0 0 1-305.4-90.6c-122.2-55.7-225.1-137.7-284.6-226.4-107.5-160.5-81.3-344.3 70-491.3 57-55.5 115.4-95.2 199.5-136.1a1112.6 1112.6 0 0 1 269.4-89.2l29.7-6c3.7-1.2 4-8.6 4-111.5V779.5l-6.3.2a823 823 0 0 0-44.8 5.8m525 104c0 103 .2 110.4 4.1 111.6l29.5 6a1221.6 1221.6 0 0 1 207.7 61.3A1088 1088 0 0 1 1862 1123c4.6 3.7 1.4 5.8-88 56-51.1 28.5-93 52.7-93 53.4 0 1.9 671.6 146.8 673.2 145.2 1.2-1.2-45.5-496-47-497.6-.2-.2-38.5 21-85 47.2l-89.6 50.2c-4.2 2-8.8.2-27.9-10.7-130.8-75-289.6-132.2-460.8-166.1a1871 1871 0 0 0-132.9-21.1c-4 0-4.2 6.7-4.2 110z"/><path fill="#cbaa7c" d="M1094.5 2156.9c0 60.6.3 85.5.5 55 .5-30.2.5-79.9 0-110.3-.2-30.2-.5-5.3-.5 55.3"/></svg>

public/assets/img/svg/gitea-rubygems.svg (generated, 2 changed lines, 7.3 KiB before and after; image diff not shown)

public/assets/img/svg/gitea-swift.svg (generated, 2 changed lines, 1.8 KiB)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" viewBox="0 0 59.5 59.5" class="svg gitea-swift" width="16" height="16" aria-hidden="true"><path fill="#F05138" d="M59.387 16.45a83 83 0 0 0-.027-1.792c-.034-1.301-.111-2.614-.343-3.9-.234-1.308-.618-2.523-1.222-3.71a12.46 12.46 0 0 0-5.452-5.452C51.156.992 49.94.609 48.635.374c-1.287-.232-2.6-.308-3.902-.343a86 86 0 0 0-1.792-.027Q41.876-.001 40.813 0H18.578q-1.064-.001-2.127.004c-.598.004-1.196.01-1.793.027q-.488.012-.978.036c-.978.047-1.959.133-2.924.307-.98.176-1.908.436-2.811.81A12.5 12.5 0 0 0 3.89 3.89a12.5 12.5 0 0 0-2.294 3.158C.992 8.235.61 9.45.374 10.758c-.231 1.286-.308 2.599-.343 3.9a86 86 0 0 0-.027 1.792Q-.002 17.515 0 18.578v22.234q-.001 1.064.004 2.129c.004.597.01 1.194.027 1.79.035 1.302.112 2.615.343 3.902.235 1.306.618 2.522 1.222 3.71a12.457 12.457 0 0 0 5.453 5.453c1.186.603 2.401.986 3.707 1.22 1.287.232 2.6.309 3.902.344q.896.023 1.793.026 1.063.006 2.127.004h22.235q1.065.002 2.128-.004.897-.003 1.792-.026c1.302-.035 2.615-.112 3.902-.344 1.306-.234 2.521-.617 3.708-1.221a12.46 12.46 0 0 0 5.452-5.453c.604-1.187.988-2.403 1.223-3.71.23-1.286.308-2.599.342-3.9.017-.597.023-1.194.027-1.791q.006-1.065.004-2.129V18.578q.001-1.065-.004-2.128"/><path fill="#fff" d="m47.061 36.661-.004-.005c.066-.223.133-.446.19-.675 2.466-9.82-3.55-21.432-13.731-27.545 4.461 6.048 6.434 13.373 4.681 19.78-.156.572-.344 1.12-.552 1.653-.225-.148-.51-.316-.89-.526 0 0-10.128-6.253-21.104-17.312-.288-.29 5.853 8.776 12.822 16.14-3.283-1.843-12.434-8.5-18.227-13.802.712 1.186 1.559 2.33 2.49 3.43 4.837 6.135 11.145 13.704 18.703 19.517-5.31 3.25-12.814 3.502-20.285.003a30.7 30.7 0 0 1-5.193-3.098c3.162 5.058 8.033 9.423 13.96 11.97 7.07 3.039 14.1 2.833 19.337.05l-.004.007.079-.047q.323-.172.637-.358c2.516-1.306 7.485-2.63 10.152 2.559.653 1.27 2.041-5.46-3.062-11.739z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" xml:space="preserve" viewBox="0 0 59.5 59.5" class="svg gitea-swift" width="16" height="16" aria-hidden="true"><path fill="#f05138" d="M59.387 16.45a83 83 0 0 0-.027-1.792c-.034-1.301-.111-2.614-.343-3.9-.234-1.308-.618-2.523-1.222-3.71a12.46 12.46 0 0 0-5.452-5.452C51.156.992 49.94.609 48.635.374c-1.287-.232-2.6-.308-3.902-.343a86 86 0 0 0-1.792-.027Q41.876-.001 40.813 0H18.578q-1.064-.001-2.127.004c-.598.004-1.196.01-1.793.027q-.488.012-.978.036c-.978.047-1.959.133-2.924.307-.98.176-1.908.436-2.811.81A12.5 12.5 0 0 0 3.89 3.89a12.5 12.5 0 0 0-2.294 3.158C.992 8.235.61 9.45.374 10.758c-.231 1.286-.308 2.599-.343 3.9a86 86 0 0 0-.027 1.792Q-.002 17.515 0 18.578v22.234q-.001 1.064.004 2.129c.004.597.01 1.194.027 1.79.035 1.302.112 2.615.343 3.902.235 1.306.618 2.522 1.222 3.71a12.457 12.457 0 0 0 5.453 5.453c1.186.603 2.401.986 3.707 1.22 1.287.232 2.6.309 3.902.344q.896.023 1.793.026 1.063.006 2.127.004h22.235q1.065.002 2.128-.004.897-.003 1.792-.026c1.302-.035 2.615-.112 3.902-.344 1.306-.234 2.521-.617 3.708-1.221a12.46 12.46 0 0 0 5.452-5.453c.604-1.187.988-2.403 1.223-3.71.23-1.286.308-2.599.342-3.9.017-.597.023-1.194.027-1.791q.006-1.065.004-2.129V18.578q.001-1.065-.004-2.128"/><path fill="#fff" d="m47.061 36.661-.004-.005c.066-.223.133-.446.19-.675 2.466-9.82-3.55-21.432-13.731-27.545 4.461 6.048 6.434 13.373 4.681 19.78-.156.572-.344 1.12-.552 1.653-.225-.148-.51-.316-.89-.526 0 0-10.128-6.253-21.104-17.312-.288-.29 5.853 8.776 12.822 16.14-3.283-1.843-12.434-8.5-18.227-13.802.712 1.186 1.559 2.33 2.49 3.43 4.837 6.135 11.145 13.704 18.703 19.517-5.31 3.25-12.814 3.502-20.285.003a30.7 30.7 0 0 1-5.193-3.098c3.162 5.058 8.033 9.423 13.96 11.97 7.07 3.039 14.1 2.833 19.337.05l-.004.007.079-.047q.323-.172.637-.358c2.516-1.306 7.485-2.63 10.152 2.559.653 1.27 2.041-5.46-3.062-11.739z"/></svg>

public/assets/img/svg/gitea-vagrant.svg (generated, 2 changed lines, 632 B)
@@ -1 +1 @@
<svg xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid" viewBox="0 0 255 263" class="svg gitea-vagrant" width="16" height="16" aria-hidden="true"><path fill="#1159CC" d="M254.22 20.234 196.03 53.47l-1.64 20.618-44.19 99.772-26.27 17.34 3.18 71.6 49.53-28.55 77.58-189.946zM92.45 56.933V34.051l-.238-.136-38.483 19.102 1.642 23.034L103.4 180.6l26.02-14.71-2.31-28.09z"/><path fill="#127EFF" d="m219.56 0-57.75 33.814h-.04v23.119L127.11 137.8v27.02l-23.12 13.41L57.788 74.146V53.81L92.45 33.848 34.668 0 .006 20.234v24.783L78.022 234.49l49.088 28.31v-71.16l23.09-13.41-.27-.17 46.51-103.914V53.81l57.78-33.576z"/></svg>
<svg xmlns="http://www.w3.org/2000/svg" preserveAspectRatio="xMidYMid" viewBox="0 0 255 263" class="svg gitea-vagrant" width="16" height="16" aria-hidden="true"><path fill="#1159cc" d="M254.22 20.234 196.03 53.47l-1.64 20.618-44.19 99.772-26.27 17.34 3.18 71.6 49.53-28.55 77.58-189.946zM92.45 56.933V34.051l-.238-.136-38.483 19.102 1.642 23.034L103.4 180.6l26.02-14.71-2.31-28.09z"/><path fill="#127eff" d="m219.56 0-57.75 33.814h-.04v23.119L127.11 137.8v27.02l-23.12 13.41L57.788 74.146V53.81L92.45 33.848 34.668 0 .006 20.234v24.783L78.022 234.49l49.088 28.31v-71.16l23.09-13.41-.27-.17 46.51-103.914V53.81l57.78-33.576z"/></svg>

@@ -64,7 +64,7 @@ func CompareDiff(ctx *context.APIContext) {
 }
 }
 
-_, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{
+headRepository, headGitRepo, ci, _, _ := parseCompareInfo(ctx, api.CreatePullRequestOption{
 Base: infos[0],
 Head: infos[1],
 })

@@ -80,7 +80,7 @@ func CompareDiff(ctx *context.APIContext) {
 apiFiles := []*api.CommitAffectedFiles{}
 userCache := make(map[string]*user_model.User)
 for i := 0; i < len(ci.Commits); i++ {
-apiCommit, err := convert.ToCommit(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, ci.Commits[i], userCache,
+apiCommit, err := convert.ToCommit(ctx, headRepository, headGitRepo, ci.Commits[i], userCache,
 convert.ToCommitOptions{
 Stat: true,
 Verification: verification,

@@ -121,6 +121,12 @@ func SearchIssues(ctx *context.APIContext) {
 // description: Number of items per page
 // type: integer
 // minimum: 0
+// - name: sort
+// in: query
+// description: Type of sort
+// type: string
+// enum: [relevance, latest, oldest, recentupdate, leastupdate, mostcomment, leastcomment, nearduedate, farduedate]
+// default: latest
 // responses:
 // "200":
 // "$ref": "#/responses/IssueList"

@@ -276,7 +282,7 @@ func SearchIssues(ctx *context.APIContext) {
 IsClosed: isClosed,
 IncludedAnyLabelIDs: includedAnyLabels,
 MilestoneIDs: includedMilestones,
-SortBy: issue_indexer.SortByCreatedDesc,
+SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
 }
 
 if since != 0 {

@@ -305,9 +311,10 @@ func SearchIssues(ctx *context.APIContext) {
 }
 }
 
-// FIXME: It's unsupported to sort by priority repo when searching by indexer,
-// it's indeed an regression, but I think it is worth to support filtering by indexer first.
-_ = ctx.FormInt64("priority_repo_id")
+priorityRepoID := ctx.FormInt64("priority_repo_id")
+if priorityRepoID > 0 {
+searchOpt.PriorityRepoID = optional.Some(priorityRepoID)
+}
 
 ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
 if err != nil {
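
The SearchIssues hunks above document a new `sort` query parameter (parsed through issue_indexer.ParseSortBy, falling back to newest-first) and forward `priority_repo_id` into the search options. A minimal client-side sketch of calling the endpoint with both parameters; the host, the /api/v1/repos/issues/search path and the token auth header are assumptions for illustration, not taken from this diff.

```go
package main

import (
	"fmt"
	"io"
	"net/http"
	"net/url"
)

func main() {
	// Hypothetical query: issues matching "panic", least recently updated first,
	// with results from repository 42 given priority by the indexer.
	q := url.Values{}
	q.Set("q", "panic")
	q.Set("sort", "leastupdate")    // one of the enum values documented above
	q.Set("priority_repo_id", "42") // now forwarded as searchOpt.PriorityRepoID

	// Host, path and auth scheme are assumptions for the sketch.
	req, err := http.NewRequest(http.MethodGet,
		"https://forgejo.example.com/api/v1/repos/issues/search?"+q.Encode(), nil)
	if err != nil {
		panic(err)
	}
	req.Header.Set("Authorization", "token <personal-access-token>")

	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		panic(err)
	}
	defer resp.Body.Close()

	body, _ := io.ReadAll(resp.Body)
	fmt.Println(resp.Status, len(body), "bytes")
}
```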

@@ -1084,7 +1084,6 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 err error
 )
 
-// If there is no head repository, it means pull request between same repository.
 headInfos := strings.Split(form.Head, ":")
 if len(headInfos) == 1 {
 isSameRepo = true

@@ -1094,7 +1093,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 headUser, err = user_model.GetUserByName(ctx, headInfos[0])
 if err != nil {
 if user_model.IsErrUserNotExist(err) {
-ctx.NotFound("GetUserByName")
+ctx.NotFound(fmt.Errorf("the owner %s does not exist", headInfos[0]))
 } else {
 ctx.Error(http.StatusInternalServerError, "GetUserByName", err)
 }

@@ -1104,7 +1103,7 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 // The head repository can also point to the same repo
 isSameRepo = ctx.Repo.Owner.ID == headUser.ID
 } else {
-ctx.NotFound()
+ctx.NotFound(fmt.Errorf("the head part of {basehead} %s must contain zero or one colon (:) but contains %d", form.Head, len(headInfos)-1))
 return nil, nil, nil, "", ""
 }
 

@@ -1116,16 +1115,10 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 baseIsBranch := ctx.Repo.GitRepo.IsBranchExist(baseBranch)
 baseIsTag := ctx.Repo.GitRepo.IsTagExist(baseBranch)
 if !baseIsCommit && !baseIsBranch && !baseIsTag {
-// Check for short SHA usage
-if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(baseBranch); baseCommit != nil {
-baseBranch = baseCommit.ID.String()
-} else {
-ctx.NotFound("BaseNotExist")
+ctx.NotFound(fmt.Errorf("could not find '%s' to be a commit, branch or tag in the base repository %s/%s", baseBranch, baseRepo.Owner.Name, baseRepo.Name))
 return nil, nil, nil, "", ""
 }
-}
 
-// Check if current user has fork of repository or in the same repository.
 headRepo := repo_model.GetForkedRepo(ctx, headUser.ID, baseRepo.ID)
 if headRepo == nil && !isSameRepo {
 err := baseRepo.GetBaseRepo(ctx)

@@ -1134,13 +1127,11 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 return nil, nil, nil, "", ""
 }
 
-// Check if baseRepo's base repository is the same as headUser's repository.
 if baseRepo.BaseRepo == nil || baseRepo.BaseRepo.OwnerID != headUser.ID {
 log.Trace("parseCompareInfo[%d]: does not have fork or in same repository", baseRepo.ID)
-ctx.NotFound("GetBaseRepo")
+ctx.NotFound(fmt.Errorf("%[1]s does not have a fork of %[2]s/%[3]s and %[2]s/%[3]s is not a fork of a repository from %[1]s", headUser.Name, baseRepo.Owner.Name, baseRepo.Name))
 return nil, nil, nil, "", ""
 }
-// Assign headRepo so it can be used below.
 headRepo = baseRepo.BaseRepo
 }
 
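
The new ctx.NotFound message above uses fmt's explicit argument indexes (`%[1]s`, `%[2]s`, `%[3]s`) so each name can be mentioned twice while being passed only once. A standalone illustration of that verb syntax:

```go
package main

import "fmt"

func main() {
	// %[n]s refers to the n-th argument, so the user, owner and repository
	// names can each be reused without repeating them in the argument list.
	msg := fmt.Sprintf(
		"%[1]s does not have a fork of %[2]s/%[3]s and %[2]s/%[3]s is not a fork of a repository from %[1]s",
		"alice", "acme", "widgets", // hypothetical names for the sketch
	)
	fmt.Println(msg)
	// alice does not have a fork of acme/widgets and acme/widgets is not a fork of a repository from alice
}
```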

@@ -1202,21 +1193,20 @@ func parseCompareInfo(ctx *context.APIContext, form api.CreatePullRequestOption)
 }
 
 // Check if head branch is valid.
-headIsCommit := ctx.Repo.GitRepo.IsCommitExist(headBranch)
-headIsBranch := ctx.Repo.GitRepo.IsBranchExist(headBranch)
-headIsTag := ctx.Repo.GitRepo.IsTagExist(headBranch)
+headIsCommit := headGitRepo.IsCommitExist(headBranch)
+headIsBranch := headGitRepo.IsBranchExist(headBranch)
+headIsTag := headGitRepo.IsTagExist(headBranch)
 if !headIsCommit && !headIsBranch && !headIsTag {
-// Check if headBranch is short sha commit hash
-if headCommit, _ := headGitRepo.GetCommit(headBranch); headCommit != nil {
-headBranch = headCommit.ID.String()
-} else {
-headGitRepo.Close()
-ctx.NotFound("IsRefExist", nil)
+ctx.NotFound(fmt.Errorf("could not find '%s' to be a commit, branch or tag in the head repository %s/%s", headBranch, headRepo.Owner.Name, headRepo.Name))
 return nil, nil, nil, "", ""
 }
-}
 
 headBranchRef := headBranch
+if headIsBranch {
+headBranchRef = git.BranchPrefix + headBranch
+} else if headIsTag {
+headBranchRef = git.TagPrefix + headBranch
+}
 
 compareInfo, err := headGitRepo.GetCompareInfo(repo_model.RepoPath(baseRepo.Owner.Name, baseRepo.Name), baseBranchRef, headBranchRef, false, false)
 if err != nil {
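
The added headBranchRef lines qualify the bare branch or tag name with a ref namespace before GetCompareInfo runs. The prefixes are the conventional Git ones, refs/heads/ and refs/tags/, which the diff reaches through git.BranchPrefix and git.TagPrefix; the small sketch below uses local constants standing in for them.

```go
package main

import "fmt"

// Conventional Git ref namespaces; local stand-ins for git.BranchPrefix and
// git.TagPrefix from forgejo.org/modules/git.
const (
	branchPrefix = "refs/heads/"
	tagPrefix    = "refs/tags/"
)

func qualifyRef(name string, isBranch, isTag bool) string {
	switch {
	case isBranch:
		return branchPrefix + name // e.g. refs/heads/forgejo
	case isTag:
		return tagPrefix + name // e.g. refs/tags/v12.0.0
	default:
		return name // raw commit IDs pass through unchanged
	}
}

func main() {
	fmt.Println(qualifyRef("forgejo", true, false))
	fmt.Println(qualifyRef("v12.0.0", false, true))
}
```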

@@ -5,6 +5,7 @@ package repo
 
 import (
 "encoding/base64"
+"errors"
 "fmt"
 "net/http"
 "net/url"

@@ -506,11 +507,8 @@ func findWikiRepoCommit(ctx *context.APIContext) (*git.Repository, *git.Commit)
 // given tree entry, encoded with base64. Writes to ctx if an error occurs.
 func wikiContentsByEntry(ctx *context.APIContext, entry *git.TreeEntry) string {
 blob := entry.Blob()
-if blob.Size() > setting.API.DefaultMaxBlobSize {
-return ""
-}
-content, err := blob.GetBlobContentBase64()
-if err != nil {
+content, err := blob.GetContentBase64(setting.API.DefaultMaxBlobSize)
+if err != nil && !errors.As(err, &git.BlobTooLargeError{}) {
 ctx.Error(http.StatusInternalServerError, "GetBlobContentBase64", err)
 return ""
 }
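
The rewritten wikiContentsByEntry asks for base64 content up to setting.API.DefaultMaxBlobSize and treats a too-large blob as an empty result rather than a failure, by matching the error with errors.As against a zero git.BlobTooLargeError{} value. A self-contained sketch of that matching pattern; the error type below is a stand-in, not the definition from forgejo.org/modules/git.

```go
package main

import (
	"errors"
	"fmt"
)

// Stand-in for git.BlobTooLargeError; the real type may carry other fields.
type BlobTooLargeError struct {
	Size, Limit int64
}

func (e BlobTooLargeError) Error() string {
	return fmt.Sprintf("blob is %d bytes, larger than the %d byte limit", e.Size, e.Limit)
}

// getContentBase64 mimics a size-limited reader: it wraps the typed error so
// callers can still detect it through errors.As.
func getContentBase64(size, limit int64) (string, error) {
	if size > limit {
		return "", fmt.Errorf("reading blob: %w", BlobTooLargeError{Size: size, Limit: limit})
	}
	return "ZGF0YQ==", nil // pretend base64 payload
}

func main() {
	_, err := getContentBase64(2048, 1024)
	// &BlobTooLargeError{} is a pointer to a zero value that errors.As fills in,
	// so an over-size blob is reported as empty content, not as a hard error.
	if err != nil && !errors.As(err, &BlobTooLargeError{}) {
		fmt.Println("hard error:", err)
		return
	}
	fmt.Println("content unavailable or empty, but not an error")
}
```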

@@ -205,7 +205,7 @@ func HookPostReceive(ctx *gitea_context.PrivateContext) {
 
 // post update for agit pull request
 // FIXME: use pr.Flow to test whether it's an Agit PR or a GH PR
-if git.SupportProcReceive && refFullName.IsPull() {
+if refFullName.IsPull() {
 if repo == nil {
 repo = loadRepository(ctx, ownerName, repoName)
 if ctx.Written() {

@@ -205,7 +205,7 @@ func HookPreReceive(ctx *gitea_context.PrivateContext) {
 preReceiveBranch(ourCtx, oldCommitID, newCommitID, refFullName)
 case refFullName.IsTag():
 preReceiveTag(ourCtx, oldCommitID, newCommitID, refFullName)
-case git.SupportProcReceive && refFullName.IsFor():
+case refFullName.IsFor():
 preReceiveFor(ourCtx, oldCommitID, newCommitID, refFullName)
 default:
 if ourCtx.isOverQuota {
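
Both receive hooks above stop consulting git.SupportProcReceive, so proc-receive (and with it the AGit flow) is treated as always available. If such a capability check were ever reintroduced it would amount to comparing the installed Git version against 2.29.0, the release that added the proc-receive hook; the sketch below is illustrative only and is not the removed Forgejo implementation.

```go
package main

import (
	"fmt"
	"strconv"
	"strings"
)

// supportsProcReceive reports whether the given Git version is at least
// 2.29.0, the release that introduced the proc-receive hook.
func supportsProcReceive(version string) bool {
	want := []int{2, 29, 0}
	parts := strings.Split(version, ".")
	for i, min := range want {
		have := 0
		if i < len(parts) {
			have, _ = strconv.Atoi(parts[i])
		}
		if have != min {
			return have > min
		}
	}
	return true // exactly 2.29.0
}

func main() {
	for _, v := range []string{"2.28.0", "2.29.0", "2.34.1"} {
		fmt.Printf("git %s -> proc-receive supported: %v\n", v, supportsProcReceive(v))
	}
}
```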

@@ -7,7 +7,6 @@ import (
 "net/http"
 
 repo_model "forgejo.org/models/repo"
-"forgejo.org/modules/git"
 "forgejo.org/modules/log"
 "forgejo.org/modules/private"
 "forgejo.org/modules/web"

@@ -18,10 +17,6 @@ import (
 // HookProcReceive proc-receive hook - only handles agit Proc-Receive requests at present
 func HookProcReceive(ctx *gitea_context.PrivateContext) {
 opts := web.GetForm(ctx).(*private.HookOptions)
-if !git.SupportProcReceive {
-ctx.Status(http.StatusNotFound)
-return
-}
 
 results, err := agit.ProcReceive(ctx, ctx.Repo.Repository, ctx.Repo.GitRepo, opts)
 if err != nil {

@@ -14,7 +14,6 @@ import (
 repo_model "forgejo.org/models/repo"
 "forgejo.org/models/unit"
 user_model "forgejo.org/models/user"
-"forgejo.org/modules/git"
 "forgejo.org/modules/log"
 "forgejo.org/modules/private"
 "forgejo.org/modules/setting"

@@ -303,7 +302,7 @@ func ServCommand(ctx *context.PrivateContext) {
 // the permission check to read. The pre-receive hook will do another
 // permission check which ensure for non AGit flow references the write
 // permission is checked.
-if git.SupportProcReceive && unitType == unit.TypeCode && ctx.FormString("verb") == "git-receive-pack" {
+if unitType == unit.TypeCode && ctx.FormString("verb") == "git-receive-pack" {
 mode = perm.AccessModeRead
 }
 
@@ -7,7 +7,6 @@ import (
 "net/http"
 "path"

-"forgejo.org/modules/git"
 "forgejo.org/modules/httpcache"
 "forgejo.org/modules/log"
 "forgejo.org/modules/setting"
@@ -15,10 +14,6 @@ import (
 )

 func SSHInfo(rw http.ResponseWriter, req *http.Request) {
-if !git.SupportProcReceive {
-rw.WriteHeader(http.StatusNotFound)
-return
-}
 rw.Header().Set("content-type", "text/json;charset=UTF-8")
 _, err := rw.Write([]byte(`{"type":"agit","version":1}`))
 if err != nil {
@@ -312,22 +312,16 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
 baseIsTag := ctx.Repo.GitRepo.IsTagExist(ci.BaseBranch)

 if !baseIsCommit && !baseIsBranch && !baseIsTag {
-// Check if baseBranch is short sha commit hash
-if baseCommit, _ := ctx.Repo.GitRepo.GetCommit(ci.BaseBranch); baseCommit != nil {
-ci.BaseBranch = baseCommit.ID.String()
-ctx.Data["BaseBranch"] = ci.BaseBranch
-baseIsCommit = true
-} else if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
+if ci.BaseBranch == ctx.Repo.GetObjectFormat().EmptyObjectID().String() {
 if isSameRepo {
 ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadBranch))
 } else {
 ctx.Redirect(ctx.Repo.RepoLink + "/compare/" + util.PathEscapeSegments(ci.HeadRepo.FullName()) + ":" + util.PathEscapeSegments(ci.HeadBranch))
 }
-return nil
 } else {
 ctx.NotFound("IsRefExist", nil)
-return nil
 }
+return nil
 }
 ctx.Data["BaseIsCommit"] = baseIsCommit
 ctx.Data["BaseIsBranch"] = baseIsBranch
@@ -514,16 +508,9 @@ func ParseCompareInfo(ctx *context.Context) *common.CompareInfo {
 headIsBranch := ci.HeadGitRepo.IsBranchExist(ci.HeadBranch)
 headIsTag := ci.HeadGitRepo.IsTagExist(ci.HeadBranch)
 if !headIsCommit && !headIsBranch && !headIsTag {
-// Check if headBranch is short sha commit hash
-if headCommit, _ := ci.HeadGitRepo.GetCommit(ci.HeadBranch); headCommit != nil {
-ci.HeadBranch = headCommit.ID.String()
-ctx.Data["HeadBranch"] = ci.HeadBranch
-headIsCommit = true
-} else {
 ctx.NotFound("IsRefExist", nil)
 return nil
 }
-}
 ctx.Data["HeadIsCommit"] = headIsCommit
 ctx.Data["HeadIsBranch"] = headIsBranch
 ctx.Data["HeadIsTag"] = headIsTag
@@ -183,9 +183,7 @@ func httpBase(ctx *context.Context) *serviceHandler {

 if repoExist {
 // Because of special ref "refs/for" .. , need delay write permission check
-if git.SupportProcReceive {
 accessMode = perm.AccessModeRead
-}

 if ctx.Data["IsActionsToken"] == true {
 taskID := ctx.Data["ActionsTaskID"].(int64)
@@ -2775,7 +2775,7 @@ func SearchIssues(ctx *context.Context) {
 IncludedAnyLabelIDs: includedAnyLabels,
 MilestoneIDs: includedMilestones,
 ProjectID: projectID,
-SortBy: issue_indexer.SortByCreatedDesc,
+SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
 }

 if since != 0 {
@@ -2804,9 +2804,10 @@ func SearchIssues(ctx *context.Context) {
 }
 }

-// FIXME: It's unsupported to sort by priority repo when searching by indexer,
-// it's indeed an regression, but I think it is worth to support filtering by indexer first.
-_ = ctx.FormInt64("priority_repo_id")
+priorityRepoID := ctx.FormInt64("priority_repo_id")
+if priorityRepoID > 0 {
+searchOpt.PriorityRepoID = optional.Some(priorityRepoID)
+}

 ids, total, err := issue_indexer.SearchIssues(ctx, searchOpt)
 if err != nil {
@@ -2944,7 +2945,7 @@ func ListIssues(ctx *context.Context) {
 IsPull: isPull,
 IsClosed: isClosed,
 ProjectID: projectID,
-SortBy: issue_indexer.SortByCreatedDesc,
+SortBy: issue_indexer.ParseSortBy(ctx.FormString("sort"), issue_indexer.SortByCreatedDesc),
 }
 if since != 0 {
 searchOpt.UpdatedAfterUnix = optional.Some(since)
@@ -996,6 +996,13 @@ func viewPullFiles(ctx *context.Context, specifiedStartCommit, specifiedEndCommi
 ctx.Data["Verification"] = verification
 ctx.Data["Author"] = user_model.ValidateCommitWithEmail(ctx, curCommit)

+if err := asymkey_model.CalculateTrustStatus(verification, ctx.Repo.Repository.GetTrustModel(), func(user *user_model.User) (bool, error) {
+return repo_model.IsOwnerMemberCollaborator(ctx, ctx.Repo.Repository, user.ID)
+}, nil); err != nil {
+ctx.ServerError("CalculateTrustStatus", err)
+return
+}
+
 note := &git.Note{}
 err = git.GetNote(ctx, ctx.Repo.GitRepo, specifiedEndCommit, note)
 if err == nil {
@@ -644,7 +644,11 @@ func RepoAssignment(ctx *Context) context.CancelFunc {
 ctx.Data["OpenGraphImageURL"] = repo.SummaryCardURL()
 ctx.Data["OpenGraphImageWidth"] = cardWidth
 ctx.Data["OpenGraphImageHeight"] = cardHeight
+if util.IsEmptyString(repo.Description) {
 ctx.Data["OpenGraphImageAltText"] = ctx.Tr("repo.summary_card_alt", repo.FullName())
+} else {
+ctx.Data["OpenGraphImageAltText"] = ctx.Tr("og.repo.summary_card.alt_description", repo.FullName(), repo.Description)
+}

 if repo.IsFork {
 RetrieveBaseRepo(ctx, repo)
@@ -1157,7 +1157,7 @@ func GetDiffSimple(ctx context.Context, gitRepo *git.Repository, opts *DiffOptio
 // so if we are using at least this version of git we don't have to tell ParsePatch to do
 // the skipping for us
 parsePatchSkipToFile := opts.SkipTo
-if opts.SkipTo != "" && git.CheckGitVersionAtLeast("2.31") == nil {
+if opts.SkipTo != "" {
 cmdDiff.AddOptionFormat("--skip-to=%s", opts.SkipTo)
 parsePatchSkipToFile = ""
 }
@@ -103,11 +103,7 @@ func createTemporaryRepoForPR(ctx context.Context, pr *issues_model.PullRequest)
 remoteRepoName := "head_repo"
 baseBranch := "base"

-fetchArgs := git.TrustedCmdArgs{"--no-tags"}
-if git.CheckGitVersionAtLeast("2.25.0") == nil {
-// Writing the commit graph can be slow and is not needed here
-fetchArgs = append(fetchArgs, "--no-write-commit-graph")
-}
+fetchArgs := git.TrustedCmdArgs{"--no-tags", "--no-write-commit-graph"}

 // addCacheRepo adds git alternatives for the cacheRepoPath in the repoPath
 addCacheRepo := func(repoPath, cacheRepoPath string) error {
@@ -5,6 +5,7 @@ package files

 import (
 "context"
+"errors"
 "fmt"
 "net/url"
 "path"
@@ -205,7 +206,7 @@ func GetContents(ctx context.Context, repo *repo_model.Repository, treePath, ref
 } else if entry.IsLink() {
 contentsResponse.Type = string(ContentTypeLink)
 // The target of a symlink file is the content of the file
-targetFromContent, err := entry.Blob().GetBlobContent(1024)
+targetFromContent, err := entry.LinkTarget()
 if err != nil {
 return nil, err
 }
@@ -273,13 +274,11 @@ func GetBlobBySHA(ctx context.Context, repo *repo_model.Repository, gitRepo *git
 if err != nil {
 return nil, err
 }
-content := ""
-if gitBlob.Size() <= setting.API.DefaultMaxBlobSize {
-content, err = gitBlob.GetBlobContentBase64()
-if err != nil {
+content, err := gitBlob.GetContentBase64(setting.API.DefaultMaxBlobSize)
+if err != nil && !errors.As(err, &git.BlobTooLargeError{}) {
 return nil, err
 }
-}
 return &api.GitBlob{
 SHA: gitBlob.ID.String(),
 URL: repo.APIURL() + "/git/blobs/" + url.PathEscape(gitBlob.ID.String()),
@@ -104,36 +104,35 @@ func GetAuthorAndCommitterUsers(author, committer *IdentityOptions, doer *user_m
 // then we use bogus User objects for them to store their FullName and Email.
 // If only one of the two are provided, we set both of them to it.
 // If neither are provided, both are the doer.
-if committer != nil && committer.Email != "" {
-if doer != nil && strings.EqualFold(doer.Email, committer.Email) {
-committerUser = doer // the committer is the doer, so will use their user object
-if committer.Name != "" {
-committerUser.FullName = committer.Name
+getUser := func(identity *IdentityOptions) *user_model.User {
+if identity == nil || identity.Email == "" {
+return nil
+}
+if doer != nil && strings.EqualFold(doer.Email, identity.Email) {
+user := doer // the committer is the doer, so will use their user object
+if identity.Name != "" {
+user.FullName = identity.Name
 }
 // Use the provided email and not revert to placeholder mail.
-committerUser.KeepEmailPrivate = false
-} else {
-committerUser = &user_model.User{
-FullName: committer.Name,
-Email: committer.Email,
-}
-}
+user.KeepEmailPrivate = false
+return user
+}
+var id int64
+if doer != nil {
+id = doer.ID
 }
-if author != nil && author.Email != "" {
-if doer != nil && strings.EqualFold(doer.Email, author.Email) {
-authorUser = doer // the author is the doer, so will use their user object
-if authorUser.Name != "" {
-authorUser.FullName = author.Name
-}
-// Use the provided email and not revert to placeholder mail.
-authorUser.KeepEmailPrivate = false
-} else {
-authorUser = &user_model.User{
-FullName: author.Name,
-Email: author.Email,
-}
-}
+return &user_model.User{
+ID: id, // Needed to ensure the doer is checked to pass rules for instance signing of CRUD actions.
+FullName: identity.Name,
+Email: identity.Email,
+}
 }
 }

+committerUser = getUser(committer)
+authorUser = getUser(author)

 if authorUser == nil {
 if committerUser != nil {
 authorUser = committerUser // No valid author was given so use the committer
@@ -147,11 +147,7 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user
 stdout := &strings.Builder{}
 stderr := &strings.Builder{}

-cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary")
-if git.CheckGitVersionAtLeast("2.32") == nil {
-cmdApply.AddArguments("-3")
-}
+cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary", "-3")

 if err := cmdApply.Run(&git.RunOpts{
 Dir: t.basePath,
 Stdout: stdout,
@@ -350,7 +350,7 @@ func parseHookPullRequestEventType(event webhook_module.HookEventType) (string,
 case webhook_module.HookEventPullRequestReviewApproved:
 return "approved", nil
 case webhook_module.HookEventPullRequestReviewRejected:
-return "rejected", nil
+return "requested changes", nil
 case webhook_module.HookEventPullRequestReviewComment:
 return "comment", nil
 default:
templates/swagger/v1_json.tmpl (generated, 18 changed lines)
@@ -4524,6 +4524,24 @@
 "description": "Number of items per page",
 "name": "limit",
 "in": "query"
+},
+{
+"enum": [
+"relevance",
+"latest",
+"oldest",
+"recentupdate",
+"leastupdate",
+"mostcomment",
+"leastcomment",
+"nearduedate",
+"farduedate"
+],
+"type": "string",
+"default": "latest",
+"description": "Type of sort",
+"name": "sort",
+"in": "query"
 }
 ],
 "responses": {
@@ -9,16 +9,23 @@ import (
 "testing"
 "time"

+"forgejo.org/models/db"
+issues_model "forgejo.org/models/issues"
+repo_model "forgejo.org/models/repo"
 unit_model "forgejo.org/models/unit"
 "forgejo.org/models/unittest"
 user_model "forgejo.org/models/user"
 "forgejo.org/modules/git"
 "forgejo.org/modules/indexer/stats"
+"forgejo.org/modules/optional"
+"forgejo.org/modules/timeutil"
+issue_service "forgejo.org/services/issue"
 files_service "forgejo.org/services/repository/files"
 "forgejo.org/tests"

 "github.com/stretchr/testify/assert"
 "github.com/stretchr/testify/require"
+"xorm.io/xorm/convert"
 )

 // first entry represents filename
@@ -29,19 +36,34 @@ type FileChanges struct {
 Versions []string
 }

+// performs additional repo setup as needed
+type SetupRepo func(*user_model.User, *repo_model.Repository)
+
 // put your Git repo declarations in here
 // feel free to amend the helper function below or use the raw variant directly
 func DeclareGitRepos(t *testing.T) func() {
+now := timeutil.TimeStampNow()
+postIssue := func(repo *repo_model.Repository, user *user_model.User, age int64, title, content string) {
+issue := &issues_model.Issue{
+RepoID: repo.ID,
+PosterID: user.ID,
+Title: title,
+Content: content,
+CreatedUnix: now.Add(-age),
+}
+require.NoError(t, issue_service.NewIssue(db.DefaultContext, repo, issue, nil, nil, nil))
+}
+
 cleanupFunctions := []func(){
-newRepo(t, 2, "diff-test", []FileChanges{{
+newRepo(t, 2, "diff-test", nil, []FileChanges{{
 Filename: "testfile",
 Versions: []string{"hello", "hallo", "hola", "native", "ubuntu-latest", "- runs-on: ubuntu-latest", "- runs-on: debian-latest"},
-}}),
+}}, nil),
-newRepo(t, 2, "language-stats-test", []FileChanges{{
+newRepo(t, 2, "language-stats-test", nil, []FileChanges{{
 Filename: "main.rs",
 Versions: []string{"fn main() {", "println!(\"Hello World!\");", "}"},
-}}),
+}}, nil),
-newRepo(t, 2, "mentions-highlighted", []FileChanges{
+newRepo(t, 2, "mentions-highlighted", nil, []FileChanges{
 {
 Filename: "history1.md",
 Versions: []string{""},
@@ -52,15 +74,38 @@ func DeclareGitRepos(t *testing.T) func() {
 Versions: []string{""},
 CommitMsg: "Another commit which mentions @user1 in the title\nand @user2 in the text",
 },
-}),
+}, nil),
-newRepo(t, 2, "file-uploads", []FileChanges{{
+newRepo(t, 2, "file-uploads", nil, []FileChanges{{
 Filename: "UPLOAD_TEST.md",
 Versions: []string{"# File upload test\nUse this repo to test various file upload features in new branches."},
-}}),
+}}, nil),
-newRepo(t, 2, "unicode-escaping", []FileChanges{{
+newRepo(t, 2, "unicode-escaping", nil, []FileChanges{{
 Filename: "a-file",
 Versions: []string{"{a}{а}"},
-}}),
+}}, nil),
+newRepo(t, 11, "dependency-test", &tests.DeclarativeRepoOptions{
+UnitConfig: optional.Some(map[unit_model.Type]convert.Conversion{
+unit_model.TypeIssues: &repo_model.IssuesConfig{
+EnableDependencies: true,
+},
+}),
+}, []FileChanges{}, func(user *user_model.User, repo *repo_model.Repository) {
+postIssue(repo, user, 500, "first issue here", "an issue created earlier")
+postIssue(repo, user, 400, "second issue here (not 1)", "not the right issue, but in the right repo")
+postIssue(repo, user, 300, "third issue here", "depends on things")
+postIssue(repo, user, 200, "unrelated issue", "shrug emoji")
+postIssue(repo, user, 100, "newest issue", "very new")
+}),
+newRepo(t, 11, "dependency-test-2", &tests.DeclarativeRepoOptions{
+UnitConfig: optional.Some(map[unit_model.Type]convert.Conversion{
+unit_model.TypeIssues: &repo_model.IssuesConfig{
+EnableDependencies: true,
+},
+}),
+}, []FileChanges{}, func(user *user_model.User, repo *repo_model.Repository) {
+postIssue(repo, user, 450, "right issue", "an issue containing word right")
+postIssue(repo, user, 150, "left issue", "an issue containing word left")
+}),
 // add your repo declarations here
 }

@@ -71,12 +116,18 @@ func DeclareGitRepos(t *testing.T) func() {
 }
 }

-func newRepo(t *testing.T, userID int64, repoName string, fileChanges []FileChanges) func() {
+func newRepo(t *testing.T, userID int64, repoName string, initOpts *tests.DeclarativeRepoOptions, fileChanges []FileChanges, setup SetupRepo) func() {
 user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: userID})
-somerepo, _, cleanupFunc := tests.CreateDeclarativeRepo(t, user, repoName,
-[]unit_model.Type{unit_model.TypeCode, unit_model.TypeIssues}, nil,
-nil,
-)
+opts := tests.DeclarativeRepoOptions{}
+if initOpts != nil {
+opts = *initOpts
+}
+opts.Name = optional.Some(repoName)
+if !opts.EnabledUnits.Has() {
+opts.EnabledUnits = optional.Some([]unit_model.Type{unit_model.TypeCode, unit_model.TypeIssues})
+}
+somerepo, _, cleanupFunc := tests.CreateDeclarativeRepoWithOptions(t, user, opts)

 var lastCommitID string
 for _, file := range fileChanges {
@@ -122,6 +173,10 @@ func newRepo(t *testing.T, userID int64, repoName string, fileChanges []FileChan
 }
 }

+if setup != nil {
+setup(user, somerepo)
+}
+
 err := stats.UpdateRepoIndexer(somerepo)
 require.NoError(t, err)

@@ -262,3 +262,91 @@ test('New Issue: Milestone', async ({page}, workerInfo) => {
 await expect(selectedMilestone).toContainText('No milestone');
 await save_visual(page);
 });
+
+test.describe('Dependency dropdown', () => {
+test.use({user: 'user11'});
+test('Issue: Dependencies', async ({page}) => {
+const response = await page.goto('/user11/dependency-test/issues/3');
+expect(response?.status()).toBe(200);
+
+const depsBlock = page.locator('.issue-content-right .depending');
+const deleteDepBtn = page.locator('.issue-content-right .depending .delete-dependency-button');
+
+const input = page.locator('#new-dependency-drop-list .search');
+const current = page.locator('#new-dependency-drop-list .text').first();
+const menu = page.locator('#new-dependency-drop-list .menu');
+const items = page.locator('#new-dependency-drop-list .menu .item');
+
+const confirmDelete = async () => {
+const modal = page.locator('.modal.remove-dependency');
+await expect(modal).toBeVisible();
+await expect(modal).toContainText('This will remove the dependency from this issue');
+await modal.locator('button.ok').click();
+};
+
+// A kludge to set the dropdown to the *wrong* value so it lets us select the correct one next.
+const resetDropdown = async () => {
+if (await current.textContent().then((s) => s.includes('#4'))) return;
+await input.click();
+await input.fill('unrelated');
+await expect(items.first()).toContainText('unrelated');
+await items.first().click();
+await expect(current).toContainText('#4');
+await input.click();
+};
+
+await expect(depsBlock).toBeVisible();
+while (await deleteDepBtn.first().isVisible()) {
+await deleteDepBtn.first().click(); // wipe added dependencies from any previously failed tests
+await confirmDelete();
+}
+await expect(depsBlock).toContainText('No dependencies set');
+
+await input.scrollIntoViewIfNeeded();
+await input.click();
+
+const first = 'first issue here';
+const second = 'second issue here';
+const newest = 'newest issue';
+
+// Without query, it should show issues in the same repo, sorted by date, except current one.
+await expect(menu).toBeVisible();
+await expect(items).toHaveCount(4); // 5 issues in this repo, minus current one
+await expect(items.first()).toContainText(newest);
+await expect(items.last()).toContainText(first);
+await resetDropdown();
+
+// With query, it should search all repos, but show current repo issues first.
+await input.fill('right');
+await expect(items.first()).toContainText(second);
+await expect.poll(() => items.count()).toBeGreaterThan(1); // there is an issue in user11/dependency-test-2 containing the word "right"
+await resetDropdown();
+
+// When entering an issue number, it should always show that one first, then all text matches.
+await input.fill('1');
+await expect(items.first()).toContainText(first);
+await expect(items.nth(1)).toBeVisible();
+await resetDropdown();
+
+// Should behave the same with a prefix
+await input.fill('#1');
+await expect(items.first()).toContainText(first);
+
+// Selecting an issue
+await items.first().click();
+await expect(current).toContainText(first);
+
+// Add dependency
+const link = page.locator('.issue-content-right .depending .dependency a.title');
+await page.locator('.issue-content-right .depending button').click();
+await expect(link).toHaveAttribute('href', '/user11/dependency-test/issues/1');
+
+// Remove dependency
+await expect(deleteDepBtn).toBeVisible();
+await deleteDepBtn.click();
+
+await confirmDelete();
+
+await expect(depsBlock).toContainText('No dependencies set');
+});
+});
@@ -93,6 +93,7 @@ func createSessions(t testing.TB) {
 users := []string{
 "user1",
 "user2",
+"user11",
 "user12",
 "user18",
 "user29",
@@ -4,8 +4,10 @@
 package integration

 import (
+"cmp"
 "fmt"
 "net/http"
+"slices"
 "testing"

 auth_model "forgejo.org/models/auth"
@@ -46,6 +48,7 @@ func TestAPIUserBlock(t *testing.T) {
 var blockedUsers []api.BlockedUser
 DecodeJSON(t, resp, &blockedUsers)
 assert.Len(t, blockedUsers, 2)
+slices.SortFunc(blockedUsers, func(a, b api.BlockedUser) int { return cmp.Compare(a.BlockID, b.BlockID) })
 assert.EqualValues(t, 1, blockedUsers[0].BlockID)
 assert.EqualValues(t, 2, blockedUsers[1].BlockID)
 })
@@ -116,7 +116,7 @@ func testAPICreateBranches(t *testing.T, giteaURL *url.URL) {
 ctx := NewAPITestContext(t, "user2", "my-noo-repo-"+objectFormat.Name(), auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
 giteaURL.Path = ctx.GitPath()

-t.Run("CreateRepo", doAPICreateRepository(ctx, false, objectFormat))
+t.Run("CreateRepo", doAPICreateRepository(ctx, nil, objectFormat))
 testCases := []struct {
 OldBranch string
 NewBranch string
@@ -48,10 +48,11 @@ func (ctx APITestContext) GitPath() string {
 return fmt.Sprintf("%s/%s.git", ctx.Username, ctx.Reponame)
 }

-func doAPICreateRepository(ctx APITestContext, empty bool, objectFormat git.ObjectFormat, callback ...func(*testing.T, api.Repository)) func(*testing.T) {
+func doAPICreateRepository(ctx APITestContext, opts *api.CreateRepoOption, objectFormat git.ObjectFormat, callback ...func(*testing.T, api.Repository)) func(*testing.T) {
 return func(t *testing.T) {
-createRepoOption := &api.CreateRepoOption{
-AutoInit: !empty,
+if opts == nil {
+opts = &api.CreateRepoOption{
+AutoInit: true,
 Description: "Temporary repo",
 Name: ctx.Reponame,
 Private: true,
|
@ -59,9 +60,10 @@ func doAPICreateRepository(ctx APITestContext, empty bool, objectFormat git.Obje
|
||||||
Gitignores: "",
|
Gitignores: "",
|
||||||
License: "WTFPL",
|
License: "WTFPL",
|
||||||
Readme: "Default",
|
Readme: "Default",
|
||||||
ObjectFormatName: objectFormat.Name(),
|
|
||||||
}
|
}
|
||||||
req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos", createRepoOption).
|
}
|
||||||
|
opts.ObjectFormatName = objectFormat.Name()
|
||||||
|
req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos", opts).
|
||||||
AddTokenAuth(ctx.Token)
|
AddTokenAuth(ctx.Token)
|
||||||
if ctx.ExpectedCode != 0 {
|
if ctx.ExpectedCode != 0 {
|
||||||
ctx.Session.MakeRequest(t, req, ctx.ExpectedCode)
|
ctx.Session.MakeRequest(t, req, ctx.ExpectedCode)
|
||||||
|
@@ -237,8 +239,8 @@ func doAPICreatePullRequest(ctx APITestContext, owner, repo, baseBranch, headBra
 }
 }

-func doAPIGetPullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) (api.PullRequest, error) {
-return func(t *testing.T) (api.PullRequest, error) {
+func doAPIGetPullRequest(ctx APITestContext, owner, repo string, index int64) func(*testing.T) api.PullRequest {
+return func(t *testing.T) api.PullRequest {
 req := NewRequest(t, http.MethodGet, fmt.Sprintf("/api/v1/repos/%s/%s/pulls/%d", owner, repo, index)).
 AddTokenAuth(ctx.Token)

@@ -248,10 +250,9 @@ func doAPIGetPullRequest(ctx APITestContext, owner, repo string, index int64) fu
 }
 resp := ctx.Session.MakeRequest(t, req, expected)

-decoder := json.NewDecoder(resp.Body)
 pr := api.PullRequest{}
-err := decoder.Decode(&pr)
-return pr, err
+DecodeJSON(t, resp, &pr)
+return pr
 }
 }

@@ -347,20 +348,40 @@ func doAPICancelAutoMergePullRequest(ctx APITestContext, owner, repo string, ind
 }
 }

-func doAPIGetBranch(ctx APITestContext, branch string, callback ...func(*testing.T, api.Branch)) func(*testing.T) {
-return func(t *testing.T) {
+func doAPIGetBranch(ctx APITestContext, branch string) func(*testing.T) api.Branch {
+return func(t *testing.T) api.Branch {
 req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/branches/%s", ctx.Username, ctx.Reponame, branch).
 AddTokenAuth(ctx.Token)
+expected := http.StatusOK
+if ctx.ExpectedCode != 0 {
+expected = ctx.ExpectedCode
+}
+resp := ctx.Session.MakeRequest(t, req, expected)
+
+branch := api.Branch{}
+DecodeJSON(t, resp, &branch)
+return branch
+}
+}
+
+func doAPICreateTag(ctx APITestContext, tag, target, message string, callback ...func(*testing.T, api.Tag)) func(*testing.T) {
+return func(t *testing.T) {
+req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/%s/%s/tags", ctx.Username, ctx.Reponame), &api.CreateTagOption{
+TagName: tag,
+Message: message,
+Target: target,
+}).
+AddTokenAuth(ctx.Token)
 if ctx.ExpectedCode != 0 {
 ctx.Session.MakeRequest(t, req, ctx.ExpectedCode)
 return
 }
-resp := ctx.Session.MakeRequest(t, req, http.StatusOK)
+resp := ctx.Session.MakeRequest(t, req, http.StatusCreated)

-var branch api.Branch
-DecodeJSON(t, resp, &branch)
+var tag api.Tag
+DecodeJSON(t, resp, &tag)
 if len(callback) > 0 {
-callback[0](t, branch)
+callback[0](t, tag)
 }
 }
 }
@@ -4,56 +4,268 @@
 package integration

 import (
+"encoding/base64"
+"fmt"
 "net/http"
+"net/http/httptest"
+"net/url"
+"strings"
 "testing"
+"time"

 auth_model "forgejo.org/models/auth"
 "forgejo.org/models/unittest"
 user_model "forgejo.org/models/user"
+"forgejo.org/modules/git"
 api "forgejo.org/modules/structs"
-"forgejo.org/tests"

 "github.com/stretchr/testify/assert"
 )

-func TestAPICompareBranches(t *testing.T) {
-defer tests.PrepareTestEnv(t)()
-
-user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
-// Login as User2.
-session := loginUser(t, user.Name)
-token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
-
-repoName := "repo20"
-
-req := NewRequestf(t, "GET", "/api/v1/repos/user2/%s/compare/add-csv...remove-files-b", repoName).
-AddTokenAuth(token)
-resp := MakeRequest(t, req, http.StatusOK)
-
-var apiResp *api.Compare
-DecodeJSON(t, resp, &apiResp)
-
-assert.Equal(t, 2, apiResp.TotalCommits)
-assert.Len(t, apiResp.Commits, 2)
-assert.Len(t, apiResp.Files, 3)
-}

 func TestAPICompareCommits(t *testing.T) {
-defer tests.PrepareTestEnv(t)()
+forEachObjectFormat(t, testAPICompareCommits)
+}

-user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
-// Login as User2.
-session := loginUser(t, user.Name)
-token := getTokenForLoggedInUser(t, session, auth_model.AccessTokenScopeWriteRepository)
+func testAPICompareCommits(t *testing.T, objectFormat git.ObjectFormat) {
+onGiteaRun(t, func(t *testing.T, u *url.URL) {
+newBranchAndFile := func(ctx APITestContext, user *user_model.User, branch, filename string) func(*testing.T) {
+return func(t *testing.T) {
+doAPICreateFile(ctx, filename, &api.CreateFileOptions{
+FileOptions: api.FileOptions{
+NewBranchName: branch,
+Message: "create " + filename,
+Author: api.Identity{
+Name: user.Name,
+Email: user.Email,
+},
+Committer: api.Identity{
+Name: user.Name,
+Email: user.Email,
+},
+Dates: api.CommitDateOptions{
+Author: time.Now(),
+Committer: time.Now(),
+},
+},
+ContentBase64: base64.StdEncoding.EncodeToString([]byte("content " + filename)),
+})(t)
+}
+}

-req := NewRequestf(t, "GET", "/api/v1/repos/user2/repo20/compare/c8e31bc...8babce9").
-AddTokenAuth(token)
+requireErrorContains := func(t *testing.T, resp *httptest.ResponseRecorder, expected string) {
+t.Helper()
+
+type response struct {
+Message string `json:"message"`
+Errors []string `json:"errors"`
+}
+var bodyResp response
+DecodeJSON(t, resp, &bodyResp)
+
+if strings.Contains(bodyResp.Message, expected) {
+return
+}
+for _, error := range bodyResp.Errors {
+if strings.Contains(error, expected) {
+return
+}
+}
+t.Log(fmt.Sprintf("expected %s in %+v", expected, bodyResp))
+t.Fail()
+}
+
+user2 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+user2repo := "repoA"
+user2Ctx := NewAPITestContext(t, user2.Name, user2repo, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+t.Run("CreateUser2Repository", doAPICreateRepository(user2Ctx, &api.CreateRepoOption{
+AutoInit: true,
+Description: "Temporary repo",
+Name: user2Ctx.Reponame,
+}, objectFormat))
+user2branchName := "user2branch"
+t.Run("CreateUser2RepositoryBranch", newBranchAndFile(user2Ctx, user2, user2branchName, "user2branchfilename.txt"))
+user2branch := doAPIGetBranch(user2Ctx, user2branchName)(t)
+user2master := doAPIGetBranch(user2Ctx, "master")(t)
+user2tag1 := "tag1"
+t.Run("CreateUser2RepositoryTag1", doAPICreateTag(user2Ctx, user2tag1, "master", "user2branchtag1"))
+user2tag2 := "tag2"
+t.Run("CreateUser2RepositoryTag1", doAPICreateTag(user2Ctx, user2tag2, user2branchName, "user2branchtag2"))
+
+shortCommitLength := 7
+
+for _, testCase := range []struct {
+name string
+a string
+b string
+}{
+{
+name: "Commits",
+a: user2master.Commit.ID,
+b: user2branch.Commit.ID,
+},
+{
+name: "ShortCommits",
+a: user2master.Commit.ID[:shortCommitLength],
+b: user2branch.Commit.ID[:shortCommitLength],
+},
+{
+name: "Branches",
+a: "master",
+b: user2branchName,
+},
+{
+name: "Tags",
+a: user2tag1,
+b: user2tag2,
+},
+} {
+t.Run("SameRepo"+testCase.name, func(t *testing.T) {
+// a...b
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s...%s", user2.Name, user2repo, testCase.a, testCase.b).
+AddTokenAuth(user2Ctx.Token)
 resp := MakeRequest(t, req, http.StatusOK)

 var apiResp *api.Compare
 DecodeJSON(t, resp, &apiResp)

-assert.Equal(t, 2, apiResp.TotalCommits)
-assert.Len(t, apiResp.Commits, 2)
-assert.Len(t, apiResp.Files, 3)
+assert.Equal(t, 1, apiResp.TotalCommits)
+assert.Len(t, apiResp.Commits, 1)
+assert.Len(t, apiResp.Files, 1)
+
+// b...a
+req = NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s...%s", user2.Name, user2repo, testCase.b, testCase.a).
+AddTokenAuth(user2Ctx.Token)
+resp = MakeRequest(t, req, http.StatusOK)
+
+DecodeJSON(t, resp, &apiResp)
+
+assert.Equal(t, 0, apiResp.TotalCommits)
+assert.Empty(t, apiResp.Commits)
+assert.Empty(t, apiResp.Files)
+})
+}
+
+user4 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 4})
+user4Ctx := NewAPITestContext(t, user4.Name, user2repo, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+
+t.Run("ForkNotFound", func(t *testing.T) {
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s...%s:%s", user2.Name, user2repo, "master", user4.Name, user2branchName).
+AddTokenAuth(user2Ctx.Token)
+resp := MakeRequest(t, req, http.StatusNotFound)
+requireErrorContains(t, resp, "user4 does not have a fork of user2/repoA and user2/repoA is not a fork of a repository from user4")
+})
+
+t.Run("User4ForksUser2Repository", doAPIForkRepository(user4Ctx, user2.Name))
+user4branchName := "user4branch"
+t.Run("CreateUser4RepositoryBranch", newBranchAndFile(user4Ctx, user4, user4branchName, "user4branchfilename.txt"))
+user4branch := doAPIGetBranch(user4Ctx, user4branchName)(t)
+user4tag4 := "tag4"
+t.Run("CreateUser4RepositoryTag4", doAPICreateTag(user4Ctx, user4tag4, user4branchName, "user4branchtag4"))
+
+t.Run("FromTheForkedRepo", func(t *testing.T) {
+// user4/repoA is a fork of user2/repoA and when evaluating
+//
+// user4/repoA/compare/master...user2:user2branch
+//
+// user2/repoA is not explicitly specified, it is implicitly the repository
+// from which user4/repoA was forked
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s...%s:%s", user4.Name, user2repo, "master", user2.Name, user2branchName).
+AddTokenAuth(user4Ctx.Token)
+resp := MakeRequest(t, req, http.StatusOK)
+
+var apiResp *api.Compare
+DecodeJSON(t, resp, &apiResp)
+
+assert.Equal(t, 1, apiResp.TotalCommits)
+assert.Len(t, apiResp.Commits, 1)
+assert.Len(t, apiResp.Files, 1)
+})
+
+for _, testCase := range []struct {
+name string
+a string
+b string
+}{
+{
+name: "Commits",
+a: user2master.Commit.ID,
+b: fmt.Sprintf("%s:%s", user4.Name, user4branch.Commit.ID),
+},
+{
+name: "ShortCommits",
+a: user2master.Commit.ID[:shortCommitLength],
+b: fmt.Sprintf("%s:%s", user4.Name, user4branch.Commit.ID[:shortCommitLength]),
+},
+{
+name: "Branches",
+a: "master",
+b: fmt.Sprintf("%s:%s", user4.Name, user4branchName),
+},
+{
+name: "Tags",
+a: user2tag1,
+b: fmt.Sprintf("%s:%s", user4.Name, user4tag4),
+},
+{
+name: "SameRepo",
+a: "master",
+b: fmt.Sprintf("%s:%s", user2.Name, user2branchName),
+},
+} {
+t.Run("ForkedRepo"+testCase.name, func(t *testing.T) {
+// user2/repoA is forked into user4/repoA and when evaluating
+//
+// user2/repoA/compare/a...user4:b
+//
+// user4/repoA is not explicitly specified, it is implicitly the repository
+// owned by user4 which is a fork of repoA
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s...%s", user2.Name, user2repo, testCase.a, testCase.b).
+AddTokenAuth(user2Ctx.Token)
+resp := MakeRequest(t, req, http.StatusOK)
+
+var apiResp *api.Compare
+DecodeJSON(t, resp, &apiResp)
+
+assert.Equal(t, 1, apiResp.TotalCommits)
+assert.Len(t, apiResp.Commits, 1)
+assert.Len(t, apiResp.Files, 1)
+})
+}
+
+t.Run("ForkUserDoesNotExist", func(t *testing.T) {
+notUser := "notauser"
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/master...%s:branchname", user2.Name, user2repo, notUser).
+AddTokenAuth(user2Ctx.Token)
+resp := MakeRequest(t, req, http.StatusNotFound)
+requireErrorContains(t, resp, fmt.Sprintf("the owner %s does not exist", notUser))
+})
+
+t.Run("HeadHasTooManyColon", func(t *testing.T) {
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/master...one:two:many", user2.Name, user2repo).
+AddTokenAuth(user2Ctx.Token)
+resp := MakeRequest(t, req, http.StatusNotFound)
+requireErrorContains(t, resp, fmt.Sprintf("must contain zero or one colon (:) but contains 2"))
+})
+
+for _, testCase := range []struct {
+what string
+baseHead string
+}{
+{
+what: "base",
+baseHead: "notexists...master",
+},
+{
+what: "head",
+baseHead: "master...notexists",
+},
+} {
+t.Run("BaseHeadNotExists "+testCase.what, func(t *testing.T) {
+req := NewRequestf(t, "GET", "/api/v1/repos/%s/%s/compare/%s", user2.Name, user2repo, testCase.baseHead).
+AddTokenAuth(user2Ctx.Token)
+resp := MakeRequest(t, req, http.StatusNotFound)
+requireErrorContains(t, resp, fmt.Sprintf("could not find 'notexists' to be a commit, branch or tag in the %s", testCase.what))
+})
+}
+})
 }

@@ -287,7 +287,14 @@ func TestAPICreateFile(t *testing.T) {
 // Test creating a file in an empty repository
 forEachObjectFormat(t, func(t *testing.T, objectFormat git.ObjectFormat) {
 reponame := "empty-repo-" + objectFormat.Name()
-doAPICreateRepository(NewAPITestContext(t, "user2", reponame, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser), true, objectFormat)(t)
+ctx := NewAPITestContext(t, "user2", reponame, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+opts := &api.CreateRepoOption{
+Description: "Temporary repo",
+Name: ctx.Reponame,
+Private: true,
+Template: true,
+}
+doAPICreateRepository(ctx, opts, objectFormat)(t)
 createFileOptions = getCreateFileOptions()
 fileID++
 treePath = fmt.Sprintf("new/file%d.txt", fileID)
@@ -17,17 +17,20 @@ import (
 )

 func TestAPIGetRawFileOrLFS(t *testing.T) {
-defer tests.PrepareTestEnv(t)()
-// Test with raw file
+onGiteaRun(t, func(t *testing.T, u *url.URL) {
+t.Run("Normal raw file", func(t *testing.T) {
+defer tests.PrintCurrentTest(t)()
+
 req := NewRequest(t, "GET", "/api/v1/repos/user2/repo1/media/README.md")
 resp := MakeRequest(t, req, http.StatusOK)
 assert.Equal(t, "# repo1\n\nDescription for repo1", resp.Body.String())
+})

-// Test with LFS
-onGiteaRun(t, func(t *testing.T, u *url.URL) {
+t.Run("LFS raw file", func(t *testing.T) {
+defer tests.PrintCurrentTest(t)()
+
 httpContext := NewAPITestContext(t, "user2", "repo-lfs-test", auth_model.AccessTokenScopeWriteRepository)
-doAPICreateRepository(httpContext, false, git.Sha1ObjectFormat, func(t *testing.T, repository api.Repository) { // FIXME: use forEachObjectFormat
+doAPICreateRepository(httpContext, nil, git.Sha1ObjectFormat, func(t *testing.T, repository api.Repository) { // FIXME: use forEachObjectFormat
 u.Path = httpContext.GitPath()
 dstPath := t.TempDir()

@@ -49,4 +52,5 @@ func TestAPIGetRawFileOrLFS(t *testing.T) {
 doAPIDeleteRepository(httpContext)
 })
 })
+})
 }
@@ -64,7 +64,7 @@ func TestAPILFSMediaType(t *testing.T) {

 func createLFSTestRepository(t *testing.T, name string) *repo_model.Repository {
 ctx := NewAPITestContext(t, "user2", "lfs-"+name+"-repo", auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-t.Run("CreateRepo", doAPICreateRepository(ctx, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+t.Run("CreateRepo", doAPICreateRepository(ctx, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat

 repo, err := repo_model.GetRepositoryByOwnerAndName(db.DefaultContext, "user2", "lfs-"+name+"-repo")
 require.NoError(t, err)
@@ -415,7 +415,7 @@ func testAPIRepoMigrateConflict(t *testing.T, u *url.URL) {
 httpContext := baseAPITestContext

 httpContext.Reponame = "repo-tmp-17"
-t.Run("CreateRepo", doAPICreateRepository(httpContext, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+t.Run("CreateRepo", doAPICreateRepository(httpContext, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat

 user, err := user_model.GetUserByName(db.DefaultContext, httpContext.Username)
 require.NoError(t, err)
@@ -498,7 +498,7 @@ func testAPIRepoCreateConflict(t *testing.T, u *url.URL) {
 httpContext := baseAPITestContext

 httpContext.Reponame = "repo-tmp-17"
-t.Run("CreateRepo", doAPICreateRepository(httpContext, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+t.Run("CreateRepo", doAPICreateRepository(httpContext, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat

 req := NewRequestWithJSON(t, "POST", "/api/v1/user/repos",
 &api.CreateRepoOption{
@ -7,6 +7,7 @@ package integration
|
||||||
import (
|
import (
|
||||||
"net/http"
|
"net/http"
|
||||||
"testing"
|
"testing"
|
||||||
|
"time"
|
||||||
|
|
||||||
"forgejo.org/models/db"
|
"forgejo.org/models/db"
|
||||||
"forgejo.org/modules/setting"
|
"forgejo.org/modules/setting"
|
||||||
|
@@ -97,9 +98,13 @@ func TestDatabaseCollation(t *testing.T) {
 defer test.MockVariableValue(&setting.Database.CharsetCollation, "utf8mb4_bin")()
 require.NoError(t, db.ConvertDatabaseTable())

-r, err := db.CheckCollations(x)
+var r *db.CheckCollationsResult
+assert.Eventually(t, func() bool {
+r, err = db.CheckCollations(x)
 require.NoError(t, err)
-assert.Equal(t, "utf8mb4_bin", r.DatabaseCollation)
+return r.DatabaseCollation == "utf8mb4_bin"
+}, time.Second*30, time.Second)
 assert.True(t, r.CollationEquals(r.ExpectedCollation, r.DatabaseCollation))
 assert.Empty(t, r.InconsistentCollationColumns)

@@ -117,9 +122,13 @@ func TestDatabaseCollation(t *testing.T) {
 defer test.MockVariableValue(&setting.Database.CharsetCollation, "utf8mb4_general_ci")()
 require.NoError(t, db.ConvertDatabaseTable())

-r, err := db.CheckCollations(x)
+var r *db.CheckCollationsResult
+assert.Eventually(t, func() bool {
+r, err = db.CheckCollations(x)
 require.NoError(t, err)
-assert.Equal(t, "utf8mb4_general_ci", r.DatabaseCollation)
+return r.DatabaseCollation == "utf8mb4_general_ci"
+}, time.Second*30, time.Second)
 assert.True(t, r.CollationEquals(r.ExpectedCollation, r.DatabaseCollation))
 assert.Empty(t, r.InconsistentCollationColumns)

|
@ -137,9 +146,15 @@ func TestDatabaseCollation(t *testing.T) {
|
||||||
defer test.MockVariableValue(&setting.Database.CharsetCollation, "")()
|
defer test.MockVariableValue(&setting.Database.CharsetCollation, "")()
|
||||||
require.NoError(t, db.ConvertDatabaseTable())
|
require.NoError(t, db.ConvertDatabaseTable())
|
||||||
|
|
||||||
|
var r *db.CheckCollationsResult
|
||||||
r, err := db.CheckCollations(x)
|
r, err := db.CheckCollations(x)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
assert.True(t, r.IsCollationCaseSensitive(r.DatabaseCollation))
|
assert.Eventually(t, func() bool {
|
||||||
|
r, err = db.CheckCollations(x)
|
||||||
|
require.NoError(t, err)
|
||||||
|
|
||||||
|
return r.IsCollationCaseSensitive(r.DatabaseCollation)
|
||||||
|
}, time.Second*30, time.Second)
|
||||||
assert.True(t, r.CollationEquals(r.ExpectedCollation, r.DatabaseCollation))
|
assert.True(t, r.CollationEquals(r.ExpectedCollation, r.DatabaseCollation))
|
||||||
assert.Empty(t, r.InconsistentCollationColumns)
|
assert.Empty(t, r.InconsistentCollationColumns)
|
||||||
})
|
})
|
||||||
|
|
|
@@ -69,7 +69,7 @@ func testGit(t *testing.T, u *url.URL) {
 
 		dstPath := t.TempDir()
 
-		t.Run("CreateRepoInDifferentUser", doAPICreateRepository(forkedUserCtx, false, objectFormat))
+		t.Run("CreateRepoInDifferentUser", doAPICreateRepository(forkedUserCtx, nil, objectFormat))
 		t.Run("AddUserAsCollaborator", doAPIAddCollaborator(forkedUserCtx, httpContext.Username, perm.AccessModeRead))
 
 		t.Run("ForkFromDifferentUser", doAPIForkRepository(httpContext, forkedUserCtx.Username))
@@ -110,7 +110,7 @@ func testGit(t *testing.T, u *url.URL) {
 		sshContext.Reponame = "repo-tmp-18-" + objectFormat.Name()
 		keyname := "my-testing-key"
 		forkedUserCtx.Reponame = sshContext.Reponame
-		t.Run("CreateRepoInDifferentUser", doAPICreateRepository(forkedUserCtx, false, objectFormat))
+		t.Run("CreateRepoInDifferentUser", doAPICreateRepository(forkedUserCtx, nil, objectFormat))
 		t.Run("AddUserAsCollaborator", doAPIAddCollaborator(forkedUserCtx, sshContext.Username, perm.AccessModeRead))
 		t.Run("ForkFromDifferentUser", doAPIForkRepository(sshContext, forkedUserCtx.Username))
 
@@ -529,8 +529,7 @@ func doMergeFork(ctx, baseCtx APITestContext, baseBranch, headBranch string) fun
 		t.Run("EnsureCanSeePull", doEnsureCanSeePull(headCtx, pr, false))
 		t.Run("CheckPR", func(t *testing.T) {
 			oldMergeBase := pr.MergeBase
-			pr2, err := doAPIGetPullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
-			require.NoError(t, err)
+			pr2 := doAPIGetPullRequest(baseCtx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
 			assert.Equal(t, oldMergeBase, pr2.MergeBase)
 		})
 		t.Run("EnsurDiffNoChange", doEnsureDiffNoChange(baseCtx, pr, diffHash, diffLength))
@@ -730,24 +729,21 @@ func doAutoPRMerge(baseCtx *APITestContext, dstPath string) func(t *testing.T) {
 
 		// Check pr status
 		ctx.ExpectedCode = 0
-		pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
-		require.NoError(t, err)
+		pr = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
 		assert.False(t, pr.HasMerged)
 
 		// Call API to add Failure status for commit
 		t.Run("CreateStatus", addCommitStatus(api.CommitStatusFailure))
 
 		// Check pr status
-		pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
-		require.NoError(t, err)
+		pr = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
 		assert.False(t, pr.HasMerged)
 
 		// Call API to add Success status for commit
 		t.Run("CreateStatus", addCommitStatus(api.CommitStatusSuccess))
 
 		// test pr status
-		pr, err = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
-		require.NoError(t, err)
+		pr = doAPIGetPullRequest(ctx, baseCtx.Username, baseCtx.Reponame, pr.Index)(t)
 		assert.True(t, pr.HasMerged)
 	}
 }
@@ -775,11 +771,6 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 	return func(t *testing.T) {
 		defer tests.PrintCurrentTest(t)()
-
-		// skip this test if git version is low
-		if git.CheckGitVersionAtLeast("2.29") != nil {
-			return
-		}
 
 		gitRepo, err := git.OpenRepository(git.DefaultContext, dstPath)
 		require.NoError(t, err)
 
@@ -836,8 +827,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			assert.Equal(t, 1, pr1.CommitsAhead)
 			assert.Equal(t, 0, pr1.CommitsBehind)
 
-			prMsg, err := doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr1.Index)(t)
-			require.NoError(t, err)
+			prMsg := doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr1.Index)(t)
 
 			assert.Equal(t, "user2/"+headBranch, pr1.HeadBranch)
 			assert.False(t, prMsg.HasMerged)
@@ -858,8 +848,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			}
 			assert.Equal(t, 1, pr2.CommitsAhead)
 			assert.Equal(t, 0, pr2.CommitsBehind)
-			prMsg, err = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr2.Index)(t)
-			require.NoError(t, err)
+			prMsg = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr2.Index)(t)
 
 			assert.Equal(t, "user2/test/"+headBranch, pr2.HeadBranch)
 			assert.False(t, prMsg.HasMerged)
@@ -910,8 +899,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			require.NoError(t, err)
 
 			unittest.AssertCount(t, &issues_model.PullRequest{}, pullNum+2)
-			prMsg, err := doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr1.Index)(t)
-			require.NoError(t, err)
+			prMsg := doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr1.Index)(t)
 
 			assert.False(t, prMsg.HasMerged)
 			assert.Equal(t, commit, prMsg.Head.Sha)
@@ -928,8 +916,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			require.NoError(t, err)
 
 			unittest.AssertCount(t, &issues_model.PullRequest{}, pullNum+2)
-			prMsg, err = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr2.Index)(t)
-			require.NoError(t, err)
+			prMsg = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr2.Index)(t)
 
 			assert.False(t, prMsg.HasMerged)
 			assert.Equal(t, commit, prMsg.Head.Sha)
@@ -953,8 +940,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			err := pr3.LoadIssue(db.DefaultContext)
 			require.NoError(t, err)
 
-			_, err2 := doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr3.Index)(t)
-			require.NoError(t, err2)
+			doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr3.Index)(t)
 
 			assert.Equal(t, "Testing commit 2", pr3.Issue.Title)
 			assert.Contains(t, pr3.Issue.Content, "Longer description.")
@@ -975,8 +961,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			err := pr.LoadIssue(db.DefaultContext)
 			require.NoError(t, err)
 
-			_, err = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr.Index)(t)
-			require.NoError(t, err)
+			doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr.Index)(t)
 
 			assert.Equal(t, "my-shiny-title", pr.Issue.Title)
 			assert.Contains(t, pr.Issue.Content, "Longer description.")
@@ -998,8 +983,7 @@ func doCreateAgitFlowPull(dstPath string, ctx *APITestContext, headBranch string
 			err := pr.LoadIssue(db.DefaultContext)
 			require.NoError(t, err)
 
-			_, err = doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr.Index)(t)
-			require.NoError(t, err)
+			doAPIGetPullRequest(*ctx, ctx.Username, ctx.Reponame, pr.Index)(t)
 
 			assert.Equal(t, "Testing commit 2", pr.Issue.Title)
 			assert.Contains(t, pr.Issue.Content, "custom")
@@ -102,11 +102,35 @@ func TestIssueCommentChangeMilestone(t *testing.T) {
 		[]string{"user1 removed this from the milestone2 milestone"},
 		[]string{"/user1", "/user2/repo1/milestone/2"})
 
-	// Deleted milestone
+	// Added milestone that in the meantime was deleted
 	testIssueCommentChangeEvent(t, htmlDoc, "2003",
 		"octicon-milestone", "User One", "/user1",
 		[]string{"user1 added this to the (deleted) milestone"},
 		[]string{"/user1"})
+
+	// Modified milestone - from a meantime deleted one to a valid one
+	testIssueCommentChangeEvent(t, htmlDoc, "2004",
+		"octicon-milestone", "User One", "/user1",
+		[]string{"user1 modified the milestone from (deleted) to milestone1"},
+		[]string{"/user1", "/user2/repo1/milestone/1"})
+
+	// Modified milestone - from a valid one to a meantime deleted one
+	testIssueCommentChangeEvent(t, htmlDoc, "2005",
+		"octicon-milestone", "User One", "/user1",
+		[]string{"user1 modified the milestone from milestone1 to (deleted)"},
+		[]string{"/user1", "/user2/repo1/milestone/1"})
+
+	// Modified milestone - from a meantime deleted one to a meantime deleted one
+	testIssueCommentChangeEvent(t, htmlDoc, "2006",
+		"octicon-milestone", "User One", "/user1",
+		[]string{"user1 modified the milestone from (deleted) to (deleted)"},
+		[]string{"/user1"})
+
+	// Removed milestone that in the meantime was deleted
+	testIssueCommentChangeEvent(t, htmlDoc, "2007",
+		"octicon-milestone", "User One", "/user1",
+		[]string{"user1 removed this from the (deleted) milestone"},
+		[]string{"/user1"})
 }
 
 func TestIssueCommentChangeProject(t *testing.T) {
@@ -1350,8 +1350,6 @@ body:
 
 func TestIssueUnsubscription(t *testing.T) {
 	onGiteaRun(t, func(t *testing.T, u *url.URL) {
-		defer tests.PrepareTestEnv(t)()
-
 		user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 1})
 		repo, _, f := tests.CreateDeclarativeRepoWithOptions(t, user, tests.DeclarativeRepoOptions{
 			AutoInit: optional.Some(false),
@@ -43,7 +43,6 @@ func TestMirrorPush(t *testing.T) {
 }
 
 func testMirrorPush(t *testing.T, u *url.URL) {
-	defer tests.PrepareTestEnv(t)()
 	defer test.MockVariableValue(&setting.Migrations.AllowLocalNetworks, true)()
 
 	require.NoError(t, migrations.Init())
@@ -98,7 +98,7 @@ func TestOpenGraphProperties(t *testing.T) {
 			"og:url": setting.AppURL + "/user27/repo49/src/branch/master/test/test.txt",
 			"og:type": "object",
 			"og:image": setting.AppURL + "user27/repo49/-/summary-card",
-			"og:image:alt": "Summary card of repository user27/repo49",
+			"og:image:alt": "Summary card of repository user27/repo49, described as: A wonderful repository with more than just a README.md",
 			"og:image:width": "1200",
 			"og:image:height": "600",
 			"og:site_name": siteName,
@@ -141,7 +141,7 @@ func TestOpenGraphProperties(t *testing.T) {
 			"og:description": "A wonderful repository with more than just a README.md",
 			"og:type": "object",
 			"og:image": setting.AppURL + "user27/repo49/-/summary-card",
-			"og:image:alt": "Summary card of repository user27/repo49",
+			"og:image:alt": "Summary card of repository user27/repo49, described as: A wonderful repository with more than just a README.md",
 			"og:image:width": "1200",
 			"og:image:height": "600",
 			"og:site_name": siteName,
@@ -4,13 +4,26 @@
 package integration
 
 import (
+	"context"
+	"encoding/base64"
+	"fmt"
 	"net/http"
+	"net/url"
+	"os"
 	"testing"
 
+	auth_model "forgejo.org/models/auth"
+	"forgejo.org/models/unittest"
+	user_model "forgejo.org/models/user"
+	"forgejo.org/modules/git"
+	"forgejo.org/modules/setting"
+	api "forgejo.org/modules/structs"
+	"forgejo.org/modules/test"
 	pull_service "forgejo.org/services/pull"
 	"forgejo.org/tests"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 func TestListPullCommits(t *testing.T) {
@@ -48,3 +61,83 @@ func TestPullCommitLinks(t *testing.T) {
 	commitLinkHref, _ := commitLink.Attr("href")
 	assert.Equal(t, "/user2/repo1/pulls/3/commits/5f22f7d0d95d614d25a5b68592adb345a4b5c7fd", commitLinkHref)
 }
+
+func TestPullCommitSignature(t *testing.T) {
+	t.Cleanup(func() {
+		// Cannot use t.Context(), it is in the done state.
+		require.NoError(t, git.InitFull(context.Background())) //nolint:usetesting
+	})
+
+	defer test.MockVariableValue(&setting.Repository.Signing.SigningName, "UwU")()
+	defer test.MockVariableValue(&setting.Repository.Signing.SigningEmail, "fox@example.com")()
+	defer test.MockVariableValue(&setting.Repository.Signing.CRUDActions, []string{"always"})()
+	defer test.MockVariableValue(&setting.Repository.Signing.InitialCommit, []string{"always"})()
+
+	filePath := "signed.txt"
+	fromBranch := "master"
+	toBranch := "branch-signed"
+
+	onGiteaRun(t, func(t *testing.T, u *url.URL) {
+		// Use a new GNUPGPHOME to avoid messing with the existing GPG keyring.
+		tmpDir := t.TempDir()
+		require.NoError(t, os.Chmod(tmpDir, 0o700))
+		t.Setenv("GNUPGHOME", tmpDir)
+
+		rootKeyPair, err := importTestingKey()
+		require.NoError(t, err)
+		defer test.MockVariableValue(&setting.Repository.Signing.SigningKey, rootKeyPair.PrimaryKey.KeyIdShortString())()
+		defer test.MockVariableValue(&setting.Repository.Signing.Format, "openpgp")()
+
+		// Ensure the git config is updated with the new signing format.
+		require.NoError(t, git.InitFull(t.Context()))
+
+		user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+		testCtx := NewAPITestContext(t, user.Name, "pull-request-commit-header-signed", auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+		u.Path = testCtx.GitPath()
+
+		t.Run("Create repository", doAPICreateRepository(testCtx, nil, git.Sha1ObjectFormat))
+
+		t.Run("Create commit", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			options := &api.CreateFileOptions{
+				FileOptions: api.FileOptions{
+					BranchName: fromBranch,
+					NewBranchName: toBranch,
+					Message: fmt.Sprintf("from:%s to:%s path:%s", fromBranch, toBranch, filePath),
+					Author: api.Identity{
+						Name: user.FullName,
+						Email: user.Email,
+					},
+					Committer: api.Identity{
+						Name: user.FullName,
+						Email: user.Email,
+					},
+				},
+				ContentBase64: base64.StdEncoding.EncodeToString(fmt.Appendf(nil, "This is new text for %s", filePath)),
+			}
+
+			req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s", testCtx.Username, testCtx.Reponame, filePath), &options).
+				AddTokenAuth(testCtx.Token)
+			resp := testCtx.Session.MakeRequest(t, req, http.StatusCreated)
+
+			var contents api.FileResponse
+			DecodeJSON(t, resp, &contents)
+
+			assert.True(t, contents.Verification.Verified)
+		})
+
+		t.Run("Create pull request", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			pr, err := doAPICreatePullRequest(testCtx, testCtx.Username, testCtx.Reponame, fromBranch, toBranch)(t)
+			require.NoError(t, err)
+
+			req := NewRequest(t, "GET", fmt.Sprintf("/%s/%s/pulls/%d/commits/%s", testCtx.Username, testCtx.Reponame, pr.Index, pr.Head.Sha))
+			resp := testCtx.Session.MakeRequest(t, req, http.StatusOK)
+
+			htmlDoc := NewHTMLParser(t, resp.Body)
+			htmlDoc.AssertElement(t, "#diff-commit-header .commit-header-row.message.isSigned.isVerified", true)
+		})
+	})
+}
@@ -287,8 +287,6 @@ func testDeleteRepository(t *testing.T, session *TestSession, ownerName, repoNam
 
 func TestPullBranchDelete(t *testing.T) {
 	onGiteaRun(t, func(t *testing.T, u *url.URL) {
-		defer tests.PrepareTestEnv(t)()
-
 		session := loginUser(t, "user1")
 		testRepoFork(t, session, "user2", "repo1", "user1", "repo1")
 		testCreateBranch(t, session, "user1", "repo1", "branch/master", "master1", http.StatusSeeOther)
@@ -89,7 +89,6 @@ func TestAPIPullUpdateByRebase(t *testing.T) {
 
 func TestAPIViewUpdateSettings(t *testing.T) {
 	onGiteaRun(t, func(t *testing.T, giteaURL *url.URL) {
-		defer tests.PrepareTestEnv(t)()
 		// Create PR to test
 		user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
 		org26 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 26})
@@ -136,7 +135,6 @@ func TestViewPullUpdateByRebase(t *testing.T) {
 
 func testViewPullUpdate(t *testing.T, updateStyle string) {
 	defer test.MockVariableValue(&setting.Repository.PullRequest.DefaultUpdateStyle, updateStyle)()
-	defer tests.PrepareTestEnv(t)()
 	// Create PR to test
 	user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
 	org26 := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 26})
@@ -25,10 +25,6 @@ import (
 )
 
 func TestRepoSSHSignedTags(t *testing.T) {
-	if git.CheckGitVersionAtLeast("2.34") != nil {
-		t.Skip("Skipping, does not support SSH signing")
-		return
-	}
 	defer tests.PrepareTestEnv(t)()
 
 	// Preparations
@@ -5,15 +5,19 @@
 package integration
 
 import (
+	"context"
+	"encoding/base64"
 	"fmt"
 	"net/http"
 	"net/url"
+	"os"
 	"path"
 	"regexp"
 	"strings"
 	"testing"
 	"time"
 
+	auth_model "forgejo.org/models/auth"
 	"forgejo.org/models/db"
 	repo_model "forgejo.org/models/repo"
 	unit_model "forgejo.org/models/unit"
@@ -22,6 +26,7 @@ import (
 	"forgejo.org/modules/git"
 	"forgejo.org/modules/optional"
 	"forgejo.org/modules/setting"
+	api "forgejo.org/modules/structs"
 	"forgejo.org/modules/test"
 	"forgejo.org/modules/translation"
 	repo_service "forgejo.org/services/repository"
@@ -193,9 +198,9 @@ func TestViewRepoWithSymlinks(t *testing.T) {
 
 // TestViewAsRepoAdmin tests PR #2167
 func TestViewAsRepoAdmin(t *testing.T) {
-	for _, user := range []string{"user2", "user4"} {
 	defer tests.PrepareTestEnv(t)()
 
+	for _, user := range []string{"user2", "user4"} {
 		session := loginUser(t, user)
 
 		req := NewRequest(t, "GET", "/user2/repo1.git")
@@ -682,6 +687,79 @@ func TestViewCommit(t *testing.T) {
 	assert.True(t, test.IsNormalPageCompleted(resp.Body.String()), "non-existing commit should render 404 page")
 }
 
+func TestViewCommitSignature(t *testing.T) {
+	t.Cleanup(func() {
+		// Cannot use t.Context(), it is in the done state.
+		require.NoError(t, git.InitFull(context.Background())) //nolint:usetesting
+	})
+
+	defer test.MockVariableValue(&setting.Repository.Signing.SigningName, "UwU")()
+	defer test.MockVariableValue(&setting.Repository.Signing.SigningEmail, "fox@example.com")()
+	defer test.MockVariableValue(&setting.Repository.Signing.CRUDActions, []string{"always"})()
+	defer test.MockVariableValue(&setting.Repository.Signing.InitialCommit, []string{"always"})()
+
+	filePath := "signed.txt"
+	fromBranch := "master"
+	toBranch := "branch-signed"
+
+	onGiteaRun(t, func(t *testing.T, u *url.URL) {
+		// Use a new GNUPGPHOME to avoid messing with the existing GPG keyring.
+		tmpDir := t.TempDir()
+		require.NoError(t, os.Chmod(tmpDir, 0o700))
+		t.Setenv("GNUPGHOME", tmpDir)
+
+		rootKeyPair, err := importTestingKey()
+		require.NoError(t, err)
+		defer test.MockVariableValue(&setting.Repository.Signing.SigningKey, rootKeyPair.PrimaryKey.KeyIdShortString())()
+		defer test.MockVariableValue(&setting.Repository.Signing.Format, "openpgp")()
+
+		// Ensure the git config is updated with the new signing format.
+		require.NoError(t, git.InitFull(t.Context()))
+
+		user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2})
+		testCtx := NewAPITestContext(t, user.Name, "commit-header-signed", auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+		u.Path = testCtx.GitPath()
+
+		t.Run("Create repository", doAPICreateRepository(testCtx, nil, git.Sha1ObjectFormat))
+
+		t.Run("Create commit", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			options := &api.CreateFileOptions{
+				FileOptions: api.FileOptions{
+					BranchName: fromBranch,
+					NewBranchName: toBranch,
+					Message: fmt.Sprintf("from:%s to:%s path:%s", fromBranch, toBranch, filePath),
+					Author: api.Identity{
+						Name: user.FullName,
+						Email: user.Email,
+					},
+					Committer: api.Identity{
+						Name: user.FullName,
+						Email: user.Email,
+					},
+				},
+				ContentBase64: base64.StdEncoding.EncodeToString(fmt.Appendf(nil, "This is new text for %s", filePath)),
+			}
+
+			req := NewRequestWithJSON(t, "POST", fmt.Sprintf("/api/v1/repos/%s/%s/contents/%s", testCtx.Username, testCtx.Reponame, filePath), &options).
+				AddTokenAuth(testCtx.Token)
+			resp := testCtx.Session.MakeRequest(t, req, http.StatusCreated)
+
+			var contents api.FileResponse
+			DecodeJSON(t, resp, &contents)
+
+			assert.True(t, contents.Verification.Verified)
+
+			req = NewRequest(t, "GET", fmt.Sprintf("/%s/%s/commit/%s", testCtx.Username, testCtx.Reponame, contents.Commit.SHA))
+			resp = testCtx.Session.MakeRequest(t, req, http.StatusOK)
+
+			htmlDoc := NewHTMLParser(t, resp.Body)
+			htmlDoc.AssertElement(t, ".commit-header-row.message.isSigned.isVerified", true)
+		})
+	})
+}
+
 func TestCommitView(t *testing.T) {
 	defer tests.PrepareTestEnv(t)()
 
@@ -42,10 +42,6 @@ func TestInstanceSigning(t *testing.T) {
 	defer test.MockProtect(&setting.Repository.Signing.CRUDActions)()
 
 	t.Run("SSH", func(t *testing.T) {
-		if git.CheckGitVersionAtLeast("2.34") != nil {
-			t.Skip("Skipping, does not support git SSH signing")
-			return
-		}
 		defer tests.PrintCurrentTest(t)()
 
 		pubKeyContent, err := os.ReadFile("tests/integration/ssh-signing-key.pub")
@@ -109,13 +105,14 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 		defer tests.PrintCurrentTest(t)()
 
 		testCtx := NewAPITestContext(t, username, "initial-unsigned"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-		t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-		t.Run("CheckMasterBranchUnsigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+		t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+		t.Run("CheckMasterBranchUnsigned", func(t *testing.T) {
+			branch := doAPIGetBranch(testCtx, "master")(t)
 			assert.NotNil(t, branch.Commit)
 			assert.NotNil(t, branch.Commit.Verification)
 			assert.False(t, branch.Commit.Verification.Verified)
 			assert.Empty(t, branch.Commit.Verification.Signature)
-		}))
+		})
 		t.Run("CreateCRUDFile-Never", crudActionCreateFile(
 			t, testCtx, user, "master", "never", "unsigned-never.txt", func(t *testing.T, response api.FileResponse) {
 				assert.False(t, response.Verification.Verified)
@@ -191,25 +188,27 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 			defer tests.PrintCurrentTest(t)()
 
 			testCtx := NewAPITestContext(t, username, "initial-pubkey"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-			t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-			t.Run("CheckMasterBranchSigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CheckMasterBranchSigned", func(t *testing.T) {
+				branch := doAPIGetBranch(testCtx, "master")(t)
 				require.NotNil(t, branch.Commit)
 				require.NotNil(t, branch.Commit.Verification)
 				assert.True(t, branch.Commit.Verification.Verified)
 				assert.Equal(t, "fox@example.com", branch.Commit.Verification.Signer.Email)
-			}))
+			})
 		})
 
 		t.Run("No publickey", func(t *testing.T) {
 			defer tests.PrintCurrentTest(t)()
 
 			testCtx := NewAPITestContext(t, "user4", "initial-no-pubkey"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-			t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-			t.Run("CheckMasterBranchSigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CheckMasterBranchSigned", func(t *testing.T) {
+				branch := doAPIGetBranch(testCtx, "master")(t)
 				require.NotNil(t, branch.Commit)
 				require.NotNil(t, branch.Commit.Verification)
 				assert.False(t, branch.Commit.Verification.Verified)
-			}))
+			})
 		})
 	})
 
@@ -226,25 +225,27 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 			testCtx := NewAPITestContext(t, username, "initial-2fa"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
 			unittest.AssertSuccessfulInsert(t, &auth_model.WebAuthnCredential{UserID: user.ID})
 
-			t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-			t.Run("CheckMasterBranchSigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CheckMasterBranchSigned", func(t *testing.T) {
+				branch := doAPIGetBranch(testCtx, "master")(t)
 				require.NotNil(t, branch.Commit)
 				require.NotNil(t, branch.Commit.Verification)
 				assert.True(t, branch.Commit.Verification.Verified)
 				assert.Equal(t, "fox@example.com", branch.Commit.Verification.Signer.Email)
-			}))
+			})
 		})
 
-		t.Run("No publickey", func(t *testing.T) {
+		t.Run("No 2fa", func(t *testing.T) {
 			defer tests.PrintCurrentTest(t)()
 
 			testCtx := NewAPITestContext(t, "user4", "initial-no-2fa"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-			t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-			t.Run("CheckMasterBranchSigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CheckMasterBranchSigned", func(t *testing.T) {
+				branch := doAPIGetBranch(testCtx, "master")(t)
 				require.NotNil(t, branch.Commit)
 				require.NotNil(t, branch.Commit.Verification)
 				assert.False(t, branch.Commit.Verification.Verified)
-			}))
+			})
 		})
 	})
 
@@ -253,13 +254,14 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 		setting.Repository.Signing.InitialCommit = []string{"always"}
 
 		testCtx := NewAPITestContext(t, username, "initial-always"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-		t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
-		t.Run("CheckMasterBranchSigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+		t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+		t.Run("CheckMasterBranchSigned", func(t *testing.T) {
+			branch := doAPIGetBranch(testCtx, "master")(t)
 			require.NotNil(t, branch.Commit)
 			require.NotNil(t, branch.Commit.Verification)
 			assert.True(t, branch.Commit.Verification.Verified)
 			assert.Equal(t, "fox@example.com", branch.Commit.Verification.Signer.Email)
-		}))
+		})
 	})
 
 	t.Run("AlwaysSign-Initial-CRUD-Never", func(t *testing.T) {
@@ -267,7 +269,7 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 		setting.Repository.Signing.CRUDActions = []string{"never"}
 
 		testCtx := NewAPITestContext(t, username, "initial-always-never"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-		t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
+		t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
 		t.Run("CreateCRUDFile-Never", crudActionCreateFile(
 			t, testCtx, user, "master", "never", "unsigned-never.txt", func(t *testing.T, response api.FileResponse) {
 				assert.False(t, response.Verification.Verified)
@@ -279,7 +281,7 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 		setting.Repository.Signing.CRUDActions = []string{"parentsigned"}
 
 		testCtx := NewAPITestContext(t, username, "initial-always-parent"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-		t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
+		t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
 		t.Run("CreateCRUDFile-ParentSigned", crudActionCreateFile(
 			t, testCtx, user, "master", "parentsigned", "signed-parent.txt", func(t *testing.T, response api.FileResponse) {
 				assert.True(t, response.Verification.Verified)
@@ -287,12 +289,71 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 			}))
 	})
 
+	t.Run("AlwaysSign-Initial-CRUD-Pubkey", func(t *testing.T) {
+		setting.Repository.Signing.CRUDActions = []string{"pubkey"}
+
+		t.Run("Has publickey", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			testCtx := NewAPITestContext(t, username, "initial-always-pubkey"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CreateCRUDFile-Pubkey", crudActionCreateFile(
+				t, testCtx, user, "master", "pubkey", "signed-pubkey.txt", func(t *testing.T, response api.FileResponse) {
+					assert.True(t, response.Verification.Verified)
+					assert.Equal(t, "fox@example.com", response.Verification.Signer.Email)
+				}))
+		})
+
+		t.Run("No publickey", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			testCtx := NewAPITestContext(t, "user4", "initial-always-no-pubkey"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CreateCRUDFile-Pubkey", crudActionCreateFile(
+				t, testCtx, user, "master", "pubkey", "unsigned-pubkey.txt", func(t *testing.T, response api.FileResponse) {
+					assert.False(t, response.Verification.Verified)
+				}))
+		})
+	})
+
+	t.Run("AlwaysSign-Initial-CRUD-Twofa", func(t *testing.T) {
+		setting.Repository.Signing.CRUDActions = []string{"twofa"}
+
+		t.Run("Has 2fa", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			t.Cleanup(func() {
+				unittest.AssertSuccessfulDelete(t, &auth_model.WebAuthnCredential{UserID: user.ID})
+			})
+
+			testCtx := NewAPITestContext(t, username, "initial-always-twofa"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+			unittest.AssertSuccessfulInsert(t, &auth_model.WebAuthnCredential{UserID: user.ID})
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CreateCRUDFile-Twofa", crudActionCreateFile(
+				t, testCtx, user, "master", "twofa", "signed-twofa.txt", func(t *testing.T, response api.FileResponse) {
+					assert.True(t, response.Verification.Verified)
+					assert.Equal(t, "fox@example.com", response.Verification.Signer.Email)
+				}))
+		})
+
+		t.Run("No 2fa", func(t *testing.T) {
+			defer tests.PrintCurrentTest(t)()
+
+			testCtx := NewAPITestContext(t, "user4", "initial-always-no-twofa"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
+			t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
+			t.Run("CreateCRUDFile-Pubkey", crudActionCreateFile(
+				t, testCtx, user, "master", "twofa", "unsigned-twofa.txt", func(t *testing.T, response api.FileResponse) {
+					assert.False(t, response.Verification.Verified)
+				}))
+		})
+	})
+
 	t.Run("AlwaysSign-Initial-CRUD-Always", func(t *testing.T) {
 		defer tests.PrintCurrentTest(t)()
 		setting.Repository.Signing.CRUDActions = []string{"always"}
 
 		testCtx := NewAPITestContext(t, username, "initial-always-always"+suffix, auth_model.AccessTokenScopeWriteRepository, auth_model.AccessTokenScopeWriteUser)
-		t.Run("CreateRepository", doAPICreateRepository(testCtx, false, objectFormat))
+		t.Run("CreateRepository", doAPICreateRepository(testCtx, nil, objectFormat))
 		t.Run("CreateCRUDFile-Always", crudActionCreateFile(
 			t, testCtx, user, "master", "always", "signed-always.txt", func(t *testing.T, response api.FileResponse) {
 				assert.True(t, response.Verification.Verified)
|
||||||
require.NoError(t, err)
|
require.NoError(t, err)
|
||||||
t.Run("MergePR", doAPIMergePullRequest(testCtx, testCtx.Username, testCtx.Reponame, pr.Index))
|
t.Run("MergePR", doAPIMergePullRequest(testCtx, testCtx.Username, testCtx.Reponame, pr.Index))
|
||||||
})
|
})
|
||||||
t.Run("CheckMasterBranchUnsigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
|
t.Run("CheckMasterBranchUnsigned", func(t *testing.T) {
|
||||||
|
branch := doAPIGetBranch(testCtx, "master")(t)
|
||||||
require.NotNil(t, branch.Commit)
|
require.NotNil(t, branch.Commit)
|
||||||
require.NotNil(t, branch.Commit.Verification)
|
require.NotNil(t, branch.Commit.Verification)
|
||||||
assert.False(t, branch.Commit.Verification.Verified)
|
assert.False(t, branch.Commit.Verification.Verified)
|
||||||
assert.Empty(t, branch.Commit.Verification.Signature)
|
assert.Empty(t, branch.Commit.Verification.Signature)
|
||||||
}))
|
})
|
||||||
})
|
})
|
||||||
|
|
||||||
t.Run("BaseSignedMerging", func(t *testing.T) {
|
t.Run("BaseSignedMerging", func(t *testing.T) {
|
||||||
|
@@ -328,12 +390,13 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 			require.NoError(t, err)
 			t.Run("MergePR", doAPIMergePullRequest(testCtx, testCtx.Username, testCtx.Reponame, pr.Index))
 		})
-		t.Run("CheckMasterBranchUnsigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+		t.Run("CheckMasterBranchUnsigned", func(t *testing.T) {
+			branch := doAPIGetBranch(testCtx, "master")(t)
 			require.NotNil(t, branch.Commit)
 			require.NotNil(t, branch.Commit.Verification)
 			assert.False(t, branch.Commit.Verification.Verified)
 			assert.Empty(t, branch.Commit.Verification.Signature)
-		}))
+		})
 	})
 
 	t.Run("CommitsSignedMerging", func(t *testing.T) {
@@ -346,11 +409,12 @@ func testCRUD(t *testing.T, u *url.URL, signingFormat string, objectFormat git.O
 			require.NoError(t, err)
 			t.Run("MergePR", doAPIMergePullRequest(testCtx, testCtx.Username, testCtx.Reponame, pr.Index))
 		})
-		t.Run("CheckMasterBranchUnsigned", doAPIGetBranch(testCtx, "master", func(t *testing.T, branch api.Branch) {
+		t.Run("CheckMasterBranchUnsigned", func(t *testing.T) {
+			branch := doAPIGetBranch(testCtx, "master")(t)
 			require.NotNil(t, branch.Commit)
 			require.NotNil(t, branch.Commit.Verification)
 			assert.True(t, branch.Commit.Verification.Verified)
-		}))
+		})
 	})
 }
 
@@ -55,7 +55,13 @@ func testPushDeployKeyOnEmptyRepo(t *testing.T, u *url.URL) {
 	keyname := fmt.Sprintf("%s-push", ctx.Reponame)
 	u.Path = ctx.GitPath()
 
-	t.Run("CreateEmptyRepository", doAPICreateRepository(ctx, true, objectFormat))
+	opts := &api.CreateRepoOption{
+		Description: "Temporary repo",
+		Name: ctx.Reponame,
+		Private: true,
+		Template: true,
+	}
+	t.Run("CreateEmptyRepository", doAPICreateRepository(ctx, opts, objectFormat))
 
 	t.Run("CheckIsEmpty", doCheckRepositoryEmptyStatus(ctx, true))
 
@@ -105,8 +111,8 @@ func testKeyOnlyOneType(t *testing.T, u *url.URL) {
 	failCtx := ctx
 	failCtx.ExpectedCode = http.StatusUnprocessableEntity
 
-	t.Run("CreateRepository", doAPICreateRepository(ctx, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
-	t.Run("CreateOtherRepository", doAPICreateRepository(otherCtx, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+	t.Run("CreateRepository", doAPICreateRepository(ctx, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+	t.Run("CreateOtherRepository", doAPICreateRepository(otherCtx, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
 
 	withKeyFile(t, keyname, func(keyFile string) {
 		var userKeyPublicKeyID int64
@@ -180,7 +186,7 @@ func testKeyOnlyOneType(t *testing.T, u *url.URL) {
 
 			t.Run("DeleteOtherRepository", doAPIDeleteRepository(otherCtxWithDeleteRepo))
 
-			t.Run("RecreateRepository", doAPICreateRepository(ctxWithDeleteRepo, false, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
+			t.Run("RecreateRepository", doAPICreateRepository(ctxWithDeleteRepo, nil, git.Sha1ObjectFormat)) // FIXME: use forEachObjectFormat
 
 			t.Run("CreateUserKey", doAPICreateUserKey(ctx, keyname, keyFile, func(t *testing.T, publicKey api.PublicKey) {
 				userKeyPublicKeyID = publicKey.ID
@@ -13,6 +13,7 @@ import (
 	"path/filepath"
 	"runtime"
 	"strings"
+	"sync/atomic"
 	"testing"
 	"time"
 
@@ -42,6 +43,7 @@ import (
 	"github.com/google/uuid"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
+	"xorm.io/xorm/convert"
 )
 
 func exitf(format string, args ...any) {
@@ -311,9 +313,26 @@ func PrepareCleanPackageData(t testing.TB) {
 	require.NoError(t, storage.Clean(storage.Packages))
 }
 
+// inTestEnv keeps track if we are current inside a test environment, this is
+// used to detect if testing code tries to prepare a test environment more than
+// once.
+var inTestEnv atomic.Bool
+
 func PrepareTestEnv(t testing.TB, skip ...int) func() {
 	t.Helper()
-	deferFn := PrintCurrentTest(t, util.OptionalArg(skip)+1)
+	if !inTestEnv.CompareAndSwap(false, true) {
+		t.Fatal("Cannot prepare a test environment if you are already in a test environment. This is a bug in your testing code.")
+	}
+
+	deferPrintCurrentTest := PrintCurrentTest(t, util.OptionalArg(skip)+1)
+	deferFn := func() {
+		deferPrintCurrentTest()
+
+		if !inTestEnv.CompareAndSwap(true, false) {
+			t.Fatal("Tried to leave test environment, but we are no longer in a test environment. This should not happen.")
+		}
+	}
+
 	cancelProcesses(t, 30*time.Second)
 	t.Cleanup(func() { cancelProcesses(t, 0) }) // cancel remaining processes in a non-blocking way
@@ -342,6 +361,7 @@ type DeclarativeRepoOptions struct {
 	Name          optional.Option[string]
 	EnabledUnits  optional.Option[[]unit_model.Type]
 	DisabledUnits optional.Option[[]unit_model.Type]
+	UnitConfig    optional.Option[map[unit_model.Type]convert.Conversion]
 	Files         optional.Option[[]*files_service.ChangeRepoFile]
 	WikiBranch    optional.Option[string]
 	AutoInit      optional.Option[bool]
@@ -390,9 +410,14 @@ func CreateDeclarativeRepoWithOptions(t *testing.T, owner *user_model.User, opts
 		enabledUnits = make([]repo_model.RepoUnit, len(units))
 
 		for i, unitType := range units {
+			var config convert.Conversion
+			if cfg, ok := opts.UnitConfig.Value()[unitType]; ok {
+				config = cfg
+			}
 			enabledUnits[i] = repo_model.RepoUnit{
 				RepoID: repo.ID,
 				Type: unitType,
+				Config: config,
 			}
 		}
 	}
web_src/fomantic/package-lock.json (generated, 28 lines changed)
@@ -494,9 +494,9 @@
       "license": "MIT"
     },
     "node_modules/@types/node": {
-      "version": "24.0.3",
-      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.3.tgz",
-      "integrity": "sha512-R4I/kzCYAdRLzfiCabn9hxWfbuHS573x+r0dJMkkzThEa7pbrcDWK+9zu3e7aBOouf+rQAciqPFMnxwr0aWgKg==",
+      "version": "24.0.7",
+      "resolved": "https://registry.npmjs.org/@types/node/-/node-24.0.7.tgz",
+      "integrity": "sha512-YIEUUr4yf8q8oQoXPpSlnvKNVKDQlPMWrmOcgzoduo7kvA2UF0/BwJ/eMKFTiTtkNL17I0M6Xe2tvwFU7be6iw==",
       "license": "MIT",
       "dependencies": {
         "undici-types": "~7.8.0"
@@ -1117,9 +1117,9 @@
       }
     },
     "node_modules/browserslist": {
-      "version": "4.25.0",
-      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.0.tgz",
-      "integrity": "sha512-PJ8gYKeS5e/whHBh8xrwYK+dAvEj7JXtz6uTucnMRB8OiGTsKccFekoRrjajPBHV8oOY+2tI4uxeceSimKwMFA==",
+      "version": "4.25.1",
+      "resolved": "https://registry.npmjs.org/browserslist/-/browserslist-4.25.1.tgz",
+      "integrity": "sha512-KGj0KoOMXLpSNkkEI6Z6mShmQy0bc1I+T7K9N81k4WWMrfz+6fQ6es80B/YLAeRoKvjYE1YSHHOW1qe9xIVzHw==",
       "funding": [
         {
           "type": "opencollective",
@ -1136,8 +1136,8 @@
|
||||||
],
|
],
|
||||||
"license": "MIT",
|
"license": "MIT",
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
"caniuse-lite": "^1.0.30001718",
|
"caniuse-lite": "^1.0.30001726",
|
||||||
"electron-to-chromium": "^1.5.160",
|
"electron-to-chromium": "^1.5.173",
|
||||||
"node-releases": "^2.0.19",
|
"node-releases": "^2.0.19",
|
||||||
"update-browserslist-db": "^1.1.3"
|
"update-browserslist-db": "^1.1.3"
|
||||||
},
|
},
|
||||||
|
@ -1249,9 +1249,9 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/caniuse-lite": {
|
"node_modules/caniuse-lite": {
|
||||||
"version": "1.0.30001724",
|
"version": "1.0.30001726",
|
||||||
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001724.tgz",
|
"resolved": "https://registry.npmjs.org/caniuse-lite/-/caniuse-lite-1.0.30001726.tgz",
|
||||||
"integrity": "sha512-WqJo7p0TbHDOythNTqYujmaJTvtYRZrjpP8TCvH6Vb9CYJerJNKamKzIWOM4BkQatWj9H2lYulpdAQNBe7QhNA==",
|
"integrity": "sha512-VQAUIUzBiZ/UnlM28fSp2CRF3ivUn1BWEvxMcVTNwpw91Py1pGbPIyIKtd+tzct9C3ouceCVdGAXxZOpZAsgdw==",
|
||||||
"funding": [
|
"funding": [
|
||||||
{
|
{
|
||||||
"type": "opencollective",
|
"type": "opencollective",
|
||||||
|
@ -2005,9 +2005,9 @@
|
||||||
}
|
}
|
||||||
},
|
},
|
||||||
"node_modules/electron-to-chromium": {
|
"node_modules/electron-to-chromium": {
|
||||||
"version": "1.5.171",
|
"version": "1.5.177",
|
||||||
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.171.tgz",
|
"resolved": "https://registry.npmjs.org/electron-to-chromium/-/electron-to-chromium-1.5.177.tgz",
|
||||||
"integrity": "sha512-scWpzXEJEMrGJa4Y6m/tVotb0WuvNmasv3wWVzUAeCgKU0ToFOhUW6Z+xWnRQANMYGxN4ngJXIThgBJOqzVPCQ==",
|
"integrity": "sha512-7EH2G59nLsEMj97fpDuvVcYi6lwTcM1xuWw3PssD8xzboAW7zj7iB3COEEEATUfjLHrs5uKBLQT03V/8URx06g==",
|
||||||
"license": "ISC"
|
"license": "ISC"
|
||||||
},
|
},
|
||||||
"node_modules/emoji-regex": {
|
"node_modules/emoji-regex": {
|
||||||
|
|
|
@@ -340,10 +340,10 @@ export default sfc; // activate the IDE's Vue plugin
       <a :class="{item: true, active: tab === 'organizations'}" @click="changeTab('organizations')">{{ textMyOrgs }} <span class="ui grey label tw-ml-2">{{ organizationsTotalCount }}</span></a>
     </div>
     <div v-show="tab === 'repos'" class="ui tab active list dashboard-repos">
-      <h4 v-if="isOrganization" class="ui top attached tw-mt-4 tw-flex tw-items-center">
-        <div class="tw-flex-1 tw-flex tw-items-center">
+      <h4 v-if="isOrganization" class="tw-mt-4 tw-flex tw-items-center">
+        <div class="tw-flex-1 tw-flex tw-gap-2 tw-items-center">
           {{ textMyRepos }}
-          <span class="ui grey label tw-ml-2">{{ reposTotalCount }}</span>
+          <span class="ui grey label">{{ reposTotalCount }}</span>
         </div>
       </h4>
       <div class="ui top attached segment repos-search">
@@ -125,16 +125,21 @@ function excludeLabel(item) {
 export function initRepoIssueSidebarList() {
   const repolink = $('#repolink').val();
   const repoId = $('#repoId').val();
-  const crossRepoSearch = $('#crossRepoSearch').val();
+  const crossRepoSearch = $('#crossRepoSearch').val() === 'true';
   const tp = $('#type').val();
-  let issueSearchUrl = `${appSubUrl}/${repolink}/issues/search?q={query}&type=${tp}`;
-  if (crossRepoSearch === 'true') {
-    issueSearchUrl = `${appSubUrl}/issues/search?q={query}&priority_repo_id=${repoId}&type=${tp}`;
-  }
   $('#new-dependency-drop-list')
     .dropdown({
       apiSettings: {
-        url: issueSearchUrl,
+        beforeSend(settings) {
+          if (!settings.urlData.query.trim()) {
+            settings.url = `${appSubUrl}/${repolink}/issues/search?q={query}&type=${tp}&sort=updated`;
+          } else if (crossRepoSearch) {
+            settings.url = `${appSubUrl}/issues/search?q={query}&priority_repo_id=${repoId}&type=${tp}&sort=relevance`;
+          } else {
+            settings.url = `${appSubUrl}/${repolink}/issues/search?q={query}&type=${tp}&sort=relevance`;
+          }
+          return settings;
+        },
        onResponse(response) {
          const filteredResponse = {success: true, results: []};
          const currIssueId = $('#new-dependency-drop-list').data('issue-id');
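A note on the design choice in the hunk above: computing the URL inside beforeSend lets every request choose its endpoint from the current query, so an empty query hits the repo-scoped search sorted by last update, while a non-empty query goes to the cross-repository endpoint (sorted by relevance) only when that feature is enabled; a single precomputed issueSearchUrl could not vary per request.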
@@ -142,7 +147,7 @@ export function initRepoIssueSidebarList() {
           for (const [_, issue] of Object.entries(response)) {
             // Don't list current issue in the dependency list.
             if (issue.id === currIssueId) {
-              return;
+              continue;
             }
             filteredResponse.results.push({
               name: `#${issue.number} ${issueTitleHTML(htmlEscape(issue.title))