10.0.0 base

Commit 49fe13eb4a by David Rotermund, 2025-02-08 23:21:02 +01:00
6629 changed files with 777382 additions and 0 deletions

29
.air.toml Normal file
@@ -0,0 +1,29 @@
root = "."
tmp_dir = ".air"
[build]
pre_cmd = ["killall -9 gitea 2>/dev/null || true"] # kill off potential zombie processes from previous runs
cmd = "make --no-print-directory backend"
bin = "gitea"
delay = 2000
include_ext = ["go", "tmpl"]
include_file = ["main.go"]
include_dir = ["cmd", "models", "modules", "options", "routers", "services"]
exclude_dir = [
"models/fixtures",
"models/migrations/fixtures",
"modules/avatar/identicon/testdata",
"modules/avatar/testdata",
"modules/git/tests",
"modules/migration/file_format_testdata",
"modules/markup/tests/repo/repo1_filepreview",
"routers/private/tests",
"services/gitdiff/testdata",
"services/migrations/testdata",
"services/webhook/sourcehut/testdata",
]
exclude_regex = ["_test.go$", "_gen.go$"]
stop_on_error = true
[log]
main_only = true
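This config is consumed by the air live-reload tool during development; a minimal usage sketch, assuming air is already installed (it is not part of this commit) and is run from the repository root:

# air picks up .air.toml automatically; -c only makes the choice explicit
air -c .air.toml
# air then runs pre_cmd to kill leftover gitea processes, rebuilds via
# `make --no-print-directory backend`, and restarts the ./gitea binary
# whenever a watched *.go or *.tmpl file changes.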

299
.deadcode-out Normal file
@@ -0,0 +1,299 @@
code.gitea.io/gitea/cmd
NoMainListener
code.gitea.io/gitea/cmd/forgejo
ContextSetNoInit
ContextSetNoExit
ContextSetStderr
ContextGetStderr
ContextSetStdout
ContextSetStdin
code.gitea.io/gitea/models
IsErrUpdateTaskNotExist
ErrUpdateTaskNotExist.Error
ErrUpdateTaskNotExist.Unwrap
IsErrSHANotFound
IsErrMergeDivergingFastForwardOnly
code.gitea.io/gitea/models/actions
ScheduleList.GetUserIDs
ScheduleList.GetRepoIDs
ScheduleList.LoadTriggerUser
ScheduleList.LoadRepos
code.gitea.io/gitea/models/asymkey
ErrGPGKeyAccessDenied.Error
ErrGPGKeyAccessDenied.Unwrap
HasDeployKey
code.gitea.io/gitea/models/auth
GetSourceByName
WebAuthnCredentials
code.gitea.io/gitea/models/db
TruncateBeans
InTransaction
DumpTables
code.gitea.io/gitea/models/dbfs
file.renameTo
Create
Rename
code.gitea.io/gitea/models/forgefed
GetFederationHost
code.gitea.io/gitea/models/forgejo/semver
GetVersion
SetVersionString
SetVersion
code.gitea.io/gitea/models/git
RemoveDeletedBranchByID
code.gitea.io/gitea/models/issues
IsErrUnknownDependencyType
ErrNewIssueInsert.Error
IsErrIssueWasClosed
ChangeMilestoneStatus
code.gitea.io/gitea/models/organization
GetTeamNamesByID
UpdateTeamUnits
SearchMembersOptions.ToConds
UsersInTeamsCount
code.gitea.io/gitea/models/perm/access
GetRepoWriters
code.gitea.io/gitea/models/project
UpdateColumnSorting
ChangeProjectStatus
code.gitea.io/gitea/models/repo
DeleteAttachmentsByIssue
FindReposMapByIDs
IsErrTopicNotExist
ErrTopicNotExist.Error
ErrTopicNotExist.Unwrap
GetTopicByName
WatchRepoMode
code.gitea.io/gitea/models/user
ErrUserInactive.Error
ErrUserInactive.Unwrap
IsErrExternalLoginUserAlreadyExist
IsErrExternalLoginUserNotExist
NewFederatedUser
IsErrUserSettingIsNotExist
GetUserAllSettings
DeleteUserSetting
GetUserEmailsByNames
GetUserNamesByIDs
code.gitea.io/gitea/modules/activitypub
NewContext
Context.APClientFactory
code.gitea.io/gitea/modules/assetfs
Bindata
code.gitea.io/gitea/modules/auth/password/hash
DummyHasher.HashWithSaltBytes
NewDummyHasher
code.gitea.io/gitea/modules/auth/password/pwn
WithHTTP
code.gitea.io/gitea/modules/base
SetupGiteaRoot
code.gitea.io/gitea/modules/cache
GetInt
WithNoCacheContext
RemoveContextData
code.gitea.io/gitea/modules/charset
BreakWriter.Write
code.gitea.io/gitea/modules/emoji
ReplaceCodes
code.gitea.io/gitea/modules/eventsource
Event.String
code.gitea.io/gitea/modules/forgefed
GetItemByType
JSONUnmarshalerFn
NotEmpty
ToRepository
OnRepository
code.gitea.io/gitea/modules/git
AllowLFSFiltersArgs
AddChanges
AddChangesWithArgs
CommitChanges
CommitChangesWithArgs
SetUpdateHook
openRepositoryWithDefaultContext
IsTagExist
ToEntryMode
LimitedReaderCloser.Read
LimitedReaderCloser.Close
code.gitea.io/gitea/modules/gitgraph
Parser.Reset
code.gitea.io/gitea/modules/gitrepo
GetBranchCommitID
GetWikiDefaultBranch
code.gitea.io/gitea/modules/graceful
Manager.TerminateContext
Manager.Err
Manager.Value
Manager.Deadline
code.gitea.io/gitea/modules/hcaptcha
WithHTTP
code.gitea.io/gitea/modules/hostmatcher
HostMatchList.AppendPattern
code.gitea.io/gitea/modules/json
StdJSON.Marshal
StdJSON.Unmarshal
StdJSON.NewEncoder
StdJSON.NewDecoder
StdJSON.Indent
code.gitea.io/gitea/modules/markup
GetRendererByType
RenderString
IsMarkupFile
code.gitea.io/gitea/modules/markup/console
Render
RenderString
code.gitea.io/gitea/modules/markup/markdown
IsDetails
IsSummary
IsTaskCheckBoxListItem
IsIcon
RenderRawString
code.gitea.io/gitea/modules/markup/markdown/math
WithInlineDollarParser
WithBlockDollarParser
code.gitea.io/gitea/modules/markup/mdstripper
stripRenderer.AddOptions
StripMarkdown
code.gitea.io/gitea/modules/markup/orgmode
RenderString
code.gitea.io/gitea/modules/private
ActionsRunnerRegister
code.gitea.io/gitea/modules/process
Manager.ExecTimeout
code.gitea.io/gitea/modules/queue
newBaseChannelSimple
newBaseChannelUnique
newBaseRedisSimple
newBaseRedisUnique
testStateRecorder.Records
testStateRecorder.Reset
newWorkerPoolQueueForTest
code.gitea.io/gitea/modules/queue/lqinternal
QueueItemIDBytes
QueueItemKeyBytes
ListLevelQueueKeys
code.gitea.io/gitea/modules/setting
NewConfigProviderFromData
GitConfigType.GetOption
InitLoggersForTest
code.gitea.io/gitea/modules/storage
ErrInvalidConfiguration.Error
IsErrInvalidConfiguration
code.gitea.io/gitea/modules/structs
ParseCreateHook
ParsePushHook
code.gitea.io/gitea/modules/sync
StatusTable.Start
StatusTable.IsRunning
code.gitea.io/gitea/modules/timeutil
GetExecutableModTime
MockSet
MockUnset
code.gitea.io/gitea/modules/translation
MockLocale.Language
MockLocale.TrString
MockLocale.Tr
MockLocale.TrN
MockLocale.TrSize
MockLocale.PrettyNumber
code.gitea.io/gitea/modules/util
OptionalArg
code.gitea.io/gitea/modules/util/filebuffer
CreateFromReader
code.gitea.io/gitea/modules/validation
IsErrNotValid
code.gitea.io/gitea/modules/web
RouteMock
RouteMockReset
code.gitea.io/gitea/modules/web/middleware
DeleteLocaleCookie
code.gitea.io/gitea/modules/zstd
NewWriter
Writer.Write
Writer.Close
code.gitea.io/gitea/routers/web
NotFound
code.gitea.io/gitea/routers/web/org
MustEnableProjects
code.gitea.io/gitea/services/context
GetPrivateContext
code.gitea.io/gitea/services/convert
ToSecret
code.gitea.io/gitea/services/forms
DeadlineForm.Validate
code.gitea.io/gitea/services/pull
IsCommitStatusContextSuccess
code.gitea.io/gitea/services/repository
IsErrForkAlreadyExist
code.gitea.io/gitea/services/repository/files
ContentType.String
GetFileResponseFromCommit
TemporaryUploadRepository.GetLastCommit
TemporaryUploadRepository.GetLastCommitByRef
code.gitea.io/gitea/services/webhook
NewNotifier

@@ -0,0 +1,42 @@
{
"name": "Gitea DevContainer",
"image": "mcr.microsoft.com/devcontainers/go:1.23-bullseye",
"features": {
// installs nodejs into container
"ghcr.io/devcontainers/features/node:1": {
"version": "20"
},
"ghcr.io/devcontainers/features/git-lfs:1.2.3": {},
"ghcr.io/devcontainers-contrib/features/poetry:2": {},
"ghcr.io/devcontainers/features/python:1": {
"version": "3.12"
},
"ghcr.io/warrenbuckley/codespace-features/sqlite:1": {}
},
"customizations": {
"vscode": {
"settings": {},
// same extensions as Gitpod, should match /.gitpod.yml
"extensions": [
"editorconfig.editorconfig",
"dbaeumer.vscode-eslint",
"golang.go",
"stylelint.vscode-stylelint",
"DavidAnson.vscode-markdownlint",
"Vue.volar",
"ms-azuretools.vscode-docker",
"vitest.explorer",
"cweijan.vscode-database-client2",
"GitHub.vscode-pull-request-github",
"Azurite.azurite"
]
}
},
"portsAttributes": {
"3000": {
"label": "Gitea Web",
"onAutoForward": "notify"
}
},
"postCreateCommand": "make deps"
}

114
.dockerignore Normal file
@@ -0,0 +1,114 @@
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# IntelliJ
.idea
# GoLand's output filename cannot be set manually
/go_build_*
# MS VSCode
.vscode
__debug_bin*
# Architecture specific extensions/prefixes
*.[568vq]
[568vq].out
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
*.exe
*.test
*.prof
*coverage.out
coverage.all
cpu.out
/modules/migration/bindata.go
/modules/migration/bindata.go.hash
/modules/options/bindata.go
/modules/options/bindata.go.hash
/modules/public/bindata.go
/modules/public/bindata.go.hash
/modules/templates/bindata.go
/modules/templates/bindata.go.hash
*.db
*.log
/gitea
/gitea-vet
/debug
/integrations.test
/bin
/dist
/custom/*
!/custom/conf
/custom/conf/*
!/custom/conf/app.example.ini
/data
/indexers
/log
/tests/integration/gitea-integration-*
/tests/integration/indexers-*
/tests/e2e/gitea-e2e-*
/tests/e2e/indexers-*
/tests/e2e/reports
/tests/e2e/test-artifacts
/tests/e2e/test-snapshots
/tests/*.ini
/yarn.lock
/yarn-error.log
/npm-debug.log*
/public/assets/js
/public/assets/css
/public/assets/fonts
/public/assets/img/avatar
/vendor
/web_src/fomantic/node_modules
/web_src/fomantic/build/*
!/web_src/fomantic/build/semantic.js
!/web_src/fomantic/build/semantic.css
!/web_src/fomantic/build/themes
/web_src/fomantic/build/themes/*
!/web_src/fomantic/build/themes/default
/web_src/fomantic/build/themes/default/assets/*
!/web_src/fomantic/build/themes/default/assets/fonts
/web_src/fomantic/build/themes/default/assets/fonts/*
!/web_src/fomantic/build/themes/default/assets/fonts/icons.woff2
!/web_src/fomantic/build/themes/default/assets/fonts/outline-icons.woff2
/VERSION
/.air
/.go-licenses
# Files and folders that were previously generated
/public/assets/img/webpack
# Snapcraft
snap/.snapcraft/
parts/
stage/
prime/
*.snap
*.snap-build
*_source.tar.bz2
.DS_Store
# Make evidence files
/.make_evidence
# Manpage
/man

28
.editorconfig Normal file
@@ -0,0 +1,28 @@
root = true
[*]
indent_style = space
indent_size = 2
tab_width = 2
end_of_line = lf
charset = utf-8
trim_trailing_whitespace = true
insert_final_newline = true
[{*.{go,tmpl,html},Makefile,go.mod}]
indent_style = tab
[templates/custom/*.tmpl]
insert_final_newline = false
[templates/swagger/v1_json.tmpl]
indent_style = space
[templates/user/auth/oidc_wellknown.tmpl]
indent_style = space
[*.svg]
insert_final_newline = false
[options/locale/locale_*.ini]
insert_final_newline = false

1
.envrc.example Normal file
@@ -0,0 +1 @@
use flake

@@ -0,0 +1,34 @@
#!/bin/bash
set -ex
end_to_end=$1
end_to_end_pr=$2
forgejo=$3
forgejo_pr_or_ref=$4
cd $forgejo
full_version=$(make show-version-full)
minor_version=$(make show-version-minor)
cd $end_to_end
if ! test -f forgejo/sources/$minor_version; then
echo "FAIL: forgejo/sources/$minor_version does not exist in the end-to-end repository"
false
fi
echo -n $minor_version >forgejo/build-from-sources
date >last-upgrade
if test -f "$forgejo_pr_or_ref"; then
forgejo_pr=$forgejo_pr_or_ref
head_url=$(jq --raw-output .head.repo.html_url <$forgejo_pr)
test "$head_url" != null
branch=$(jq --raw-output .head.ref <$forgejo_pr)
test "$branch" != null
echo $head_url $branch $full_version >forgejo/sources/$minor_version
else
forgejo_ref=$forgejo_pr_or_ref
echo $GITHUB_SERVER_URL/$GITHUB_REPOSITORY ${forgejo_ref#refs/heads/} $full_version >forgejo/sources/$minor_version
fi

@@ -0,0 +1,22 @@
#!/bin/bash
set -ex
end_to_end=$1
end_to_end_pr=$2
forgejo=$3
forgejo_ref=$4
cd $end_to_end
date >last-upgrade
organizations=lib/ORGANIZATIONS
if ! test -f $organizations; then
echo "$organizations file not found"
false
fi
#
# Invert the order of lookup because the goal in the release build
# pipeline is to test the latest build, if available, instead of the
# stable version with the same version.
#
echo forgejo-integration forgejo-experimental forgejo >$organizations

@@ -0,0 +1,57 @@
name: 🦋 Bug Report (web interface / frontend)
description: Something doesn't look quite as it should? Report it here!
title: "bug: "
labels: ["bug/new-report", "forgejo/ui"]
body:
- type: markdown
attributes:
value: |
**NOTE: If your issue is a security concern, please email <security@forgejo.org> (GPG: `A4676E79`) instead of opening a public issue.**
- type: markdown
attributes:
value: |
- Please speak English, as this is the language all maintainers can speak and write.
- Be as clear and concise as possible. A very verbose report is harder to interpret in a concrete way.
- Be civil, and follow the [Forgejo Code of Conduct](https://codeberg.org/forgejo/code-of-conduct).
- Take a moment to [check that your issue hasn't been reported before](https://codeberg.org/forgejo/forgejo/issues?q=&type=all&labels=78137).
- type: dropdown
id: can-reproduce
attributes:
label: Can you reproduce the bug on the Forgejo test instance?
description: |
Please try reproducing your issue at https://dev.next.forgejo.org.
It is running the latest development branch and will confirm the problem is not already fixed.
If you can reproduce it, provide a URL in the description.
options:
- "Yes"
- "No"
validations:
required: true
- type: textarea
id: description
attributes:
label: Description
description: |
Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see above).
If you think this is a JavaScript error, include a copy of the JavaScript console.
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: Please provide at least one screenshot showing the issue.
validations:
required: true
- type: input
id: forgejo-ver
attributes:
label: Forgejo Version
description: Forgejo version (or commit reference) your instance is running
- type: input
id: browser-ver
attributes:
label: Browser Version
description: The browser and version that you are using to access Forgejo
validations:
required: true

@@ -0,0 +1,72 @@
name: 🐛 Bug Report (server / backend)
description: Found something you weren't expecting? Report it here!
title: "bug: "
labels: bug/new-report
body:
- type: markdown
attributes:
value: |
**NOTE: If your issue is a security concern, please email <security@forgejo.org> (GPG: `A4676E79`) instead of opening a public issue.**
- type: markdown
attributes:
value: |
- Please speak English, as this is the language all maintainers can speak and write.
- Be as clear and concise as possible. A very verbose report is harder to interpret in a concrete way.
- Be civil, and follow the [Forgejo Code of Conduct](https://codeberg.org/forgejo/code-of-conduct).
- Take a moment to [check that your issue hasn't been reported before](https://codeberg.org/forgejo/forgejo/issues?q=&type=all&labels=78137).
- type: dropdown
id: can-reproduce
attributes:
label: Can you reproduce the bug on the Forgejo test instance?
description: |
Please try reproducing your issue at https://dev.next.forgejo.org.
It is running the latest development branch and will confirm the problem is not already fixed.
If you can reproduce it, provide a URL in the description.
options:
- "Yes"
- "No"
validations:
required: true
- type: textarea
id: description
attributes:
label: Description
description: |
Please provide a description of your issue here, with a URL if you were able to reproduce the issue (see above).
validations:
required: true
- type: input
id: forgejo-ver
attributes:
label: Forgejo Version
description: Forgejo version (or commit reference) of your instance
- type: textarea
id: run-info
attributes:
label: How are you running Forgejo?
description: |
Please include information on whether you built Forgejo yourself, used one of our downloads, or are using some other package.
Please also tell us how you are running Forgejo, e.g. if it is being run from a container, a command-line, systemd etc.
If you are using a package or systemd tell us what distribution you are using.
validations:
required: true
- type: textarea
id: logs
attributes:
label: Logs
description: |
It's really important to provide pertinent logs. You must give us `DEBUG` level logs.
Please read https://forgejo.org/docs/latest/admin/logging-documentation/.
In addition, if your problem relates to git commands set `RUN_MODE=dev` at the top of `app.ini`.
Please copy and paste your logs here, with any sensitive information (e.g. API keys) removed/hidden.
You can wrap your logs in `<details>...</details>` tags so it doesn't take up too much space in the issue.
- type: dropdown
id: database
attributes:
label: Database
description: What database system are you running?
options:
- SQLite
- PostgreSQL
- MySQL

@@ -0,0 +1,16 @@
contact_links:
- name: 🔓 Security Reports
url: mailto:security@forgejo.org
about: "Please email <security@forgejo.org> (See https://forgejo.org/.well-known/security.txt)."
- name: 💬 Matrix Chat Room
url: https://matrix.to/#/#forgejo-chat:matrix.org
about: Please ask questions and discuss configuration or deployment problems here.
- name: 💬 Matrix Space
url: https://matrix.to/#/#forgejo:matrix.org
about: A collection of Matrix rooms relating to Forgejo in addition to the main chat room.
- name: 📚 Documentation
url: https://forgejo.org/docs/latest/
about: Documentation about Forgejo.
- name: ❓ Frequently Asked Questions
url: https://forgejo.org/faq/
about: Please check if your question is mentioned here.

@@ -0,0 +1,31 @@
name: 💡 Feature Request
description: Got an idea for a feature that Forgejo doesn't have yet? Suggest it here!
title: "feat: "
labels: ["enhancement/feature"]
body:
- type: markdown
attributes:
value: |
- Please speak English, as this is the language all maintainers can speak and write.
- Be as clear and concise as possible. A very verbose request is harder to interpret in a concrete way.
- Be civil, and follow the [Forgejo Code of Conduct](https://codeberg.org/forgejo/code-of-conduct).
- Please make sure you are using the latest release of Forgejo and take a moment to [check that your feature hasn't already been suggested](https://codeberg.org/forgejo/forgejo/issues?q=&type=all&labels=78139).
- type: textarea
id: needs-benefits
attributes:
label: Needs and benefits
description: As concisely as possible, describe the benefits your feature request will provide or the problems it will try to solve.
validations:
required: true
- type: textarea
id: description
attributes:
label: Feature Description
description: As concisely as possible, describe the feature you would like to see added or the changes you would like to see made to Forgejo.
validations:
required: true
- type: textarea
id: screenshots
attributes:
label: Screenshots
description: If you can, provide screenshots of an implementation on another site, e.g. GitHub.

@@ -0,0 +1,33 @@
---
name: "Pull Request Template"
about: "Template for all Pull Requests"
labels:
- test/needed
---
## Checklist
The [contributor guide](https://forgejo.org/docs/next/contributor/) contains information that will be helpful to first time contributors. There also are a few [conditions for merging Pull Requests in Forgejo repositories](https://codeberg.org/forgejo/governance/src/branch/main/PullRequestsAgreement.md). You are also welcome to join the [Forgejo development chatroom](https://matrix.to/#/#forgejo-development:matrix.org).
### Tests
- I added test coverage for Go changes...
- [ ] in their respective `*_test.go` for unit tests.
- [ ] in the `tests/integration` directory if it involves interactions with a live Forgejo server.
- I added test coverage for JavaScript changes...
- [ ] in `web_src/js/*.test.js` if it can be unit tested.
- [ ] in `tests/e2e/*.test.e2e.js` if it requires interactions with a live Forgejo server (see also the [developer guide for JavaScript testing](https://codeberg.org/forgejo/forgejo/src/branch/forgejo/tests/e2e/README.md#end-to-end-tests)).
### Documentation
- [ ] I created a pull request [to the documentation](https://codeberg.org/forgejo/docs) to explain to Forgejo users how to use this change.
- [ ] I did not document these changes and I do not expect someone else to do it.
### Release notes
- [ ] I do not want this change to show in the release notes.
- [ ] I want the title to show in the release notes with a link to this pull request.
- [ ] I want the content of the `release-notes/<pull request number>.md` to be used for the release notes instead of the title.

@@ -0,0 +1,6 @@
FROM data.forgejo.org/oci/alpine:3.20
ARG RELEASE_VERSION=unknown
LABEL maintainer="contact@forgejo.org" \
org.opencontainers.image.version="${RELEASE_VERSION}"
RUN mkdir -p /app/gitea
RUN ( echo '#!/bin/sh' ; echo "echo forgejo v$RELEASE_VERSION" ) > /app/gitea/gitea ; chmod +x /app/gitea/gitea
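The final RUN line generates a stub in place of the real binary; assuming the image is built with --build-arg RELEASE_VERSION=1.2.3, the resulting /app/gitea/gitea is equivalent to:

#!/bin/sh
echo forgejo v1.2.3

This is enough for the release-simulation sanity check, which runs the published binary with --version and greps for the version string.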

@@ -0,0 +1,5 @@
VERSION ?= $(shell cat VERSION 2>/dev/null)
sources-tarbal:
mkdir -p dist/release
echo $(VERSION) > VERSION
sources=forgejo-src-$(VERSION).tar.gz ; tar --transform 's|^./|forgejo-src-$(VERSION)/|' -czf dist/release/forgejo-src-$(VERSION).tar.gz . ; cd dist/release ; shasum -a 256 $$sources > $$sources.sha256
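A hedged invocation sketch of this fixture target; the VERSION value is arbitrary:

# writes VERSION, packs the working tree so every path inside the archive is
# prefixed forgejo-src-1.2.3/, and records a SHA-256 checksum next to it
make VERSION=1.2.3 sources-tarbal
ls dist/release
# forgejo-src-1.2.3.tar.gz  forgejo-src-1.2.3.tar.gz.sha256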

@@ -0,0 +1,3 @@
module code.gitea.io/gitea
go 1.23.3

@@ -0,0 +1,29 @@
inputs:
packages:
description: 'Packages to install'
required: true
release:
description: 'Release to install from'
default: testing
runs:
using: "composite"
steps:
- name: setup apt package source
run: |
export DEBIAN_FRONTEND=noninteractive
echo "deb http://deb.debian.org/debian/ ${RELEASE} main" > "/etc/apt/sources.list.d/${RELEASE}.list"
env:
RELEASE: ${{inputs.release}}
- name: install packages
run: |
apt-get update -qq
apt-get -q install -qq -y ${PACKAGES}
env:
PACKAGES: ${{inputs.packages}}
- name: remove temporary package list to prevent using it in other steps
run: |
rm "/etc/apt/sources.list.d/${RELEASE}.list"
apt-get update -qq
env:
RELEASE: ${{inputs.release}}

@@ -0,0 +1,15 @@
runs:
using: "composite"
steps:
- run: |
su forgejo -c 'make deps-backend'
- uses: actions/cache@v4
id: cache-backend
with:
path: ${{github.workspace}}/gitea
key: backend-build-${{ github.sha }}
- if: steps.cache-backend.outputs.cache-hit != 'true'
run: |
su forgejo -c 'make backend'
env:
TAGS: bindata

@@ -0,0 +1,59 @@
# SPDX-License-Identifier: MIT
name: 'Forgejo Actions to setup Go and cache dependencies'
author: 'Forgejo authors'
description: |
Wrap the setup-go with improved dependency caching.
inputs:
username:
description: 'User for which to manage the dependency cache'
default: root
runs:
using: "composite"
steps:
- name: "Install zstd for faster caching"
run: |
apt-get update -qq
apt-get -q install -qq -y zstd
- name: "Set up Go using setup-go"
uses: https://data.forgejo.org/actions/setup-go@v5
id: go-version
with:
go-version-file: "go.mod"
# do not cache dependencies, we do this manually
cache: false
- name: "Get go environment information"
id: go-environment
run: |
echo "modcache=$(su ${RUN_AS_USER} -c '/opt/hostedtoolcache/go/${GO_VERSION}/x64/bin/go env GOMODCACHE')" >> "$GITHUB_OUTPUT"
echo "cache=$(su ${RUN_AS_USER} -c '/opt/hostedtoolcache/go/${GO_VERSION}/x64/bin/go env GOCACHE')" >> "$GITHUB_OUTPUT"
env:
RUN_AS_USER: ${{ inputs.username }}
GO_VERSION: ${{ steps.go-version.outputs.go-version }}
- name: "Create cache folders with correct permissions (for non-root users)"
if: inputs.username != 'root'
# when the cache is restored, only the permissions of the last part are restored
# so assuming that /home/user exists and we are restoring /home/user/go/pkg/mod,
# both folders will have the correct permissions, but
# /home/user/go and /home/user/go/pkg might be owned by root
run: |
su ${RUN_AS_USER} -c 'mkdir -p "${MODCACHE_DIR}" "${CACHE_DIR}"'
env:
RUN_AS_USER: ${{ inputs.username }}
MODCACHE_DIR: ${{ steps.go-environment.outputs.modcache }}
CACHE_DIR: ${{ steps.go-environment.outputs.cache }}
- name: "Restore Go dependencies from cache or mark for later caching"
id: cache-deps
uses: actions/cache@v4
with:
key: setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}-${{ hashFiles('go.sum', 'go.mod') }}
restore-keys: |
setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-${{ steps.go-version.outputs.go_version }}-
setup-cache-go-deps-${{ runner.os }}-${{ inputs.username }}-
path: |
${{ steps.go-environment.outputs.modcache }}
${{ steps.go-environment.outputs.cache }}

@@ -0,0 +1,25 @@
# TODO:
# - [ ] prepare a forgejo ci image with the necessary tools and forgejo user
runs:
using: "composite"
steps:
- name: setup user and permissions
run: |
git config --add safe.directory '*'
# ignore if the user already exists (like with the playwright image)
adduser --quiet --comment forgejo --disabled-password forgejo || true
chown -R forgejo:forgejo .
- uses: ./.forgejo/workflows-composite/setup-cache-go
with:
username: forgejo
- name: validate go version
run: |
set -ex
toolchain=$(grep -oP '(?<=toolchain ).+' go.mod)
version=$(go version | cut -d' ' -f3)
if [ "$toolchain" != "$version" ]; then
echo "go version mismatch: $toolchain <> $version"
exit 1
fi
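To make the toolchain check concrete, a sketch of the values it compares, assuming the repository's go.mod carries a toolchain directive like `toolchain go1.23.3` (the test fixture go.mod shown earlier does not):

# go.mod contains:   toolchain go1.23.3
toolchain=$(grep -oP '(?<=toolchain ).+' go.mod)   # -> "go1.23.3"
# `go version` prints e.g. "go version go1.23.3 linux/amd64"
version=$(go version | cut -d' ' -f3)              # -> "go1.23.3"
[ "$toolchain" = "$version" ]                      # the step fails when they differ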

@@ -0,0 +1,61 @@
# Copyright 2024 The Forgejo Authors
# SPDX-License-Identifier: MIT
#
# To modify this workflow:
#
# - change pull_request_target: to pull_request:
# so that it runs from a pull request instead of the default branch
#
# - push it to the wip-ci-backport branch on the forgejo repository
# otherwise it will not have access to the secrets required to push
# the PR
#
# - open a pull request targeting wip-ci-backport that includes a change
# that can be backported without conflict in v1.21 and set the
# `backport/v1.21` label.
#
# - once it works, open a pull request for the sake of keeping track
# of the change even if the PR won't run it because it will use
# whatever is in the default branch instead
#
# - after it is merged, double check it works by setting a
# `backport/v1.21` label on a merged pull request that can be backported
# without conflict.
#
name: issue-labels
on:
pull_request_target:
types:
- closed
- labeled
jobs:
backporting:
if: >
( vars.ROLE == 'forgejo-coding' ) && (
github.event.pull_request.merged
&&
contains(toJSON(github.event.pull_request.labels), 'backport/v')
)
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- name: event info
run: |
cat <<'EOF'
${{ toJSON(github) }}
EOF
- uses: https://data.forgejo.org/actions/git-backporting@v4.8.4
with:
target-branch-pattern: "^backport/(?<target>(v.*))$"
strategy: ort
strategy-option: find-renames
cherry-pick-options: -x
auth: ${{ secrets.BACKPORT_TOKEN }}
pull-request: ${{ github.event.pull_request.url }}
auto-no-squash: true
enable-err-notification: true
git-user: forgejo-backport-action
git-email: forgejo-backport-action@noreply.codeberg.org

@@ -0,0 +1,135 @@
name: Integration tests for the release process
on:
push:
paths:
- Makefile
- Dockerfile
- Dockerfile.rootless
- docker/**
- .forgejo/workflows/build-release.yml
- .forgejo/workflows/build-release-integration.yml
branches-ignore:
- renovate/**
pull_request:
paths:
- Makefile
- Dockerfile
- Dockerfile.rootless
- docker/**
- .forgejo/workflows/build-release.yml
- .forgejo/workflows/build-release-integration.yml
jobs:
release-simulation:
if: vars.ROLE == 'forgejo-coding'
runs-on: lxc-bookworm
steps:
- uses: actions/checkout@v4
- id: forgejo
uses: https://data.forgejo.org/actions/setup-forgejo@v2.0.4
with:
user: root
password: admin1234
image-version: 1.21
lxc-ip-prefix: 10.0.9
- name: publish the forgejo release
shell: bash
run: |
set -x
cat > /etc/docker/daemon.json <<EOF
{
"insecure-registries" : ["${{ steps.forgejo.outputs.host-port }}"]
}
EOF
systemctl restart docker
apt-get install -qq -y xz-utils
dir=$(mktemp -d)
trap "rm -fr $dir" EXIT
url=http://root:admin1234@${{ steps.forgejo.outputs.host-port }}
export FORGEJO_RUNNER_LOGS="${{ steps.forgejo.outputs.runner-logs }}"
function sanity_check() {
local url=$1 version=$2
#
# Minimal sanity checks. Since the binary
# is a shell script it does not test the sanity of the cross
# build, only the sanity of the naming of the binaries.
#
for arch in amd64 arm64 arm-6 ; do
local binary=forgejo-$version-linux-$arch
for suffix in '' '.xz' ; do
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix > $binary$suffix
if test "$suffix" = .xz ; then
unxz --keep $binary$suffix
fi
chmod +x $binary
./$binary --version | grep $version
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$binary$suffix.sha256 > $binary$suffix.sha256
shasum -a 256 --check $binary$suffix.sha256
rm $binary$suffix
done
done
local sources=forgejo-src-$version.tar.gz
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources > $sources
curl --fail -L -sS $url/root/forgejo/releases/download/v$version/$sources.sha256 > $sources.sha256
shasum -a 256 --check $sources.sha256
docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version
docker pull ${{ steps.forgejo.outputs.host-port }}/root/forgejo:$version-rootless
}
#
# Create a new project with a fake forgejo and the release workflow only
#
cp -a .forgejo/testdata/build-release/* $dir
mkdir -p $dir/.forgejo/workflows
cp .forgejo/workflows/build-release.yml $dir/.forgejo/workflows
cp $dir/Dockerfile $dir/Dockerfile.rootless
forgejo-test-helper.sh push $dir $url root forgejo
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"${{ steps.forgejo.outputs.token }}"}' $url/api/v1/repos/root/forgejo/actions/secrets/TOKEN
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"root"}' $url/api/v1/repos/root/forgejo/actions/secrets/DOER
forgejo-curl.sh api_json -X PUT --data-raw '{"data":"true"}' $url/api/v1/repos/root/forgejo/actions/secrets/VERBOSE
#
# Push a tag to trigger the release workflow and wait for it to complete
#
version=1.2.3
sha=$(forgejo-test-helper.sh branch_tip $url root/forgejo main)
forgejo-curl.sh api_json --data-raw '{"tag_name": "v'$version'", "target": "'$sha'"}' $url/api/v1/repos/root/forgejo/tags
LOOPS=180 forgejo-test-helper.sh wait_success "$url" root/forgejo $sha
sanity_check $url $version
#
# Push a commit to a branch that triggers the build of a test release
#
version=1.2-test
(
git clone $url/root/forgejo /tmp/forgejo
cd /tmp/forgejo
date > DATE
git config user.email root@example.com
git config user.name username
git add .
git commit -m 'update'
git push $url/root/forgejo main:forgejo
)
sha=$(forgejo-test-helper.sh branch_tip $url root/forgejo forgejo)
LOOPS=180 forgejo-test-helper.sh wait_success "$url" root/forgejo $sha
sanity_check $url $version
- name: full logs
if: always()
run: |
sed -e 's/^/[RUNNER LOGS] /' ${{ steps.forgejo.outputs.runner-logs }}
docker logs forgejo | sed -e 's/^/[FORGEJO LOGS]/'
sleep 5 # hack to avoid mixing outputs in Forgejo v1.21
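To make the naming convention verified by sanity_check explicit, the following stand-alone snippet prints the asset names it fetches for the first simulated release (version 1.2.3), mirroring the loops over arch and suffix above:

version=1.2.3
for arch in amd64 arm64 arm-6; do
  for suffix in '' .xz; do
    echo "forgejo-$version-linux-$arch$suffix"
    echo "forgejo-$version-linux-$arch$suffix.sha256"
  done
done
echo "forgejo-src-$version.tar.gz"
echo "forgejo-src-$version.tar.gz.sha256"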

@@ -0,0 +1,237 @@
#
# See also https://forgejo.org/docs/next/contributor/release/#stable-release-process
#
# https://codeberg.org/forgejo-integration/forgejo
#
# Builds a release from a codeberg.org/forgejo-integration tag
#
# vars.ROLE: forgejo-integration
#
# secrets.DOER: forgejo-experimental-ci
# secrets.TOKEN: <generated from codeberg.org/forgejo-experimental-ci> scope read:user, write:repository, write:package
#
# secrets.CASCADE_ORIGIN_TOKEN: <generated from codeberg.org/forgejo-experimental-ci> scope read:user, write:repository, write:issue
# secrets.CASCADE_DESTINATION_TOKEN: <generated from code.forgejo.org/forgejo-ci> scope read:user, write:repository, write:issue
# vars.CASCADE_DESTINATION_DOER: forgejo-ci
#
# vars.SKIP_END_TO_END: `true` or `false`
# It must be `false` (or absent) so https://code.forgejo.org/forgejo/end-to-end is run
# with the newly built release.
# It must be set to `true` when a release is missing, for instance because it was
# removed and failed to upload.
#
on:
push:
tags: 'v[0-9]+.[0-9]+.*'
branches:
- 'forgejo'
- 'v*/forgejo'
jobs:
release:
runs-on: lxc-bookworm
# root is used for testing, allow it
if: vars.ROLE == 'forgejo-integration' || github.repository_owner == 'root'
steps:
- uses: actions/checkout@v4
with:
fetch-depth: 0
- name: Sanitize the name of the repository
id: repository
run: |
repository="${{ github.repository }}"
echo "value=${repository##*/}" >> "$GITHUB_OUTPUT"
- uses: https://data.forgejo.org/actions/setup-node@v4
with:
node-version: 20
- uses: https://data.forgejo.org/actions/setup-go@v5
with:
go-version-file: "go.mod"
- name: version from ref
id: release-info
shell: bash
run: |
set -x
ref="${{ github.ref }}"
if [[ $ref =~ ^refs/heads/ ]] ; then
if test "$ref" = "refs/heads/forgejo" ; then
version=$(git tag -l --sort=version:refname --merged | grep -v -e '-test$' | tail -1 | sed -E -e 's/^(v[0-9]+\.[0-9]+).*/\1/')-test
else
version=${ref#refs/heads/}
version=${version%/forgejo}-test
fi
override=true
fi
if [[ $ref =~ ^refs/tags/ ]] ; then
version=${ref#refs/tags/}
override=false
fi
if test -z "$version" ; then
echo failed to figure out the release version from the reference=$ref
exit 1
fi
version=${version#v}
git describe --exclude '*-test' --tags --always
echo "sha=${{ github.sha }}" >> "$GITHUB_OUTPUT"
echo "version=$version" >> "$GITHUB_OUTPUT"
echo "override=$override" >> "$GITHUB_OUTPUT"
- name: release notes
id: release-notes
run: |
anchor=${{ steps.release-info.outputs.version }}
anchor=${anchor//./-}
cat >> "$GITHUB_OUTPUT" <<EOF
value<<ENDVAR
See https://codeberg.org/forgejo/forgejo/src/branch/forgejo/RELEASE-NOTES.md#$anchor
ENDVAR
EOF
- name: cache node_modules
id: node
uses: https://data.forgejo.org/actions/cache@v4
with:
path: |
node_modules
key: node-${{ steps.release-info.outputs.version }}
- name: skip if node cache hit
if: steps.node.outputs.cache-hit != 'true'
run: echo no hit
- name: Build sources
run: |
set -x
apt-get -qq install -y make
version=${{ steps.release-info.outputs.version }}
#
# Make sure all files are owned by the current user.
# When run as root `npx webpack` will assume the identity
# of the owner of the current working directory and may
# fail to create files if some sub-directories are not owned
# by the same user.
#
# Binaries:
# Node: 18.17.0 - /usr/local/node-v18.17.0-linux-x64/bin/node
# npm: 9.6.7 - /usr/local/node-v18.17.0-linux-x64/bin/npm
# Packages:
# add-asset-webpack-plugin: 2.0.1 => 2.0.1
# css-loader: 6.8.1 => 6.8.1
# esbuild-loader: 3.0.1 => 3.0.1
# license-checker-webpack-plugin: 0.2.1 => 0.2.1
# monaco-editor-webpack-plugin: 7.0.1 => 7.0.1
# vue-loader: 17.2.2 => 17.2.2
# webpack: 5.87.0 => 5.87.0
# webpack-cli: 5.1.4 => 5.1.4
#
chown -R $(id -u) .
make VERSION=$version TAGS=bindata sources-tarbal
mv dist/release release
(
tmp=$(mktemp -d)
tar --directory $tmp -zxvf release/*$version*.tar.gz
cd $tmp/*
#
# Verify `make frontend` files are available
#
test -d public/assets/css
test -d public/assets/fonts
test -d public/assets/js
#
# Verify `make generate` files are available
#
test -f modules/public/bindata.go
#
# Sanity check to verify that the source tarball knows the
# version and is able to rebuild itself from it.
#
# When in sources the version is determined with git.
# When in the tarball the version is determined from a VERSION file.
#
make sources-tarbal
tarbal=$(echo dist/release/*$version*.tar.gz)
if ! test -f $tarbal ; then
echo $tarbal does not exist
find dist release
exit 1
fi
)
- name: build container & release
if: ${{ secrets.TOKEN != '' }}
uses: https://data.forgejo.org/forgejo/forgejo-build-publish/build@v5.3.1
with:
forgejo: "${{ env.GITHUB_SERVER_URL }}"
owner: "${{ env.GITHUB_REPOSITORY_OWNER }}"
repository: "${{ steps.repository.outputs.value }}"
doer: "${{ secrets.DOER }}"
release-version: "${{ steps.release-info.outputs.version }}"
sha: "${{ steps.release-info.outputs.sha }}"
token: "${{ secrets.TOKEN }}"
platforms: linux/amd64,linux/arm64,linux/arm/v6
release-notes: "${{ steps.release-notes.outputs.value }}"
binary-name: forgejo
binary-path: /app/gitea/gitea
override: "${{ steps.release-info.outputs.override }}"
verify-labels: "maintainer=contact@forgejo.org,org.opencontainers.image.version=${{ steps.release-info.outputs.version }}"
verbose: ${{ vars.VERBOSE || secrets.VERBOSE || 'false' }}
- name: build rootless container
if: ${{ secrets.TOKEN != '' }}
uses: https://data.forgejo.org/forgejo/forgejo-build-publish/build@v5.3.1
with:
forgejo: "${{ env.GITHUB_SERVER_URL }}"
owner: "${{ env.GITHUB_REPOSITORY_OWNER }}"
repository: "${{ steps.repository.outputs.value }}"
doer: "${{ secrets.DOER }}"
release-version: "${{ steps.release-info.outputs.version }}"
sha: "${{ steps.release-info.outputs.sha }}"
token: "${{ secrets.TOKEN }}"
platforms: linux/amd64,linux/arm64,linux/arm/v6
suffix: -rootless
dockerfile: Dockerfile.rootless
override: "${{ steps.release-info.outputs.override }}"
verify-labels: "maintainer=contact@forgejo.org,org.opencontainers.image.version=${{ steps.release-info.outputs.version }}"
verbose: ${{ vars.VERBOSE || secrets.VERBOSE || 'false' }}
- name: end-to-end tests
if: ${{ secrets.TOKEN != '' && vars.ROLE == 'forgejo-integration' && vars.SKIP_END_TO_END != 'true' }}
uses: https://data.forgejo.org/actions/cascading-pr@v2.2.0
with:
origin-url: ${{ env.GITHUB_SERVER_URL }}
origin-repo: ${{ github.repository }}
origin-token: ${{ secrets.CASCADE_ORIGIN_TOKEN }}
origin-ref: refs/heads/forgejo
destination-url: https://code.forgejo.org
destination-fork-repo: ${{ vars.CASCADE_DESTINATION_DOER }}/end-to-end
destination-repo: forgejo/end-to-end
destination-branch: main
destination-token: ${{ secrets.CASCADE_DESTINATION_TOKEN }}
update: .forgejo/cascading-release-end-to-end
- name: copy to experimental
if: vars.ROLE == 'forgejo-integration' && secrets.TOKEN != ''
run: |
if test "${{ vars.VERBOSE }}" = true ; then
set -x
fi
tag=v${{ steps.release-info.outputs.version }}
url=https://any:${{ secrets.TOKEN }}@codeberg.org
if test "${{ steps.release-info.outputs.override }}" = "true" ; then
curl -sS -X DELETE $url/api/v1/repos/forgejo-experimental/forgejo/releases/tags/$tag > /dev/null
curl -sS -X DELETE $url/api/v1/repos/forgejo-experimental/forgejo/tags/$tag > /dev/null
fi
# actions/checkout@v3 sets http.https://codeberg.org/.extraheader with the automatic token.
# Get rid of it so it does not prevent using the token that has write permissions
git config --local --unset http.https://codeberg.org/.extraheader
if test -f .git/shallow ; then
echo "unexptected .git/shallow file is present"
echo "it suggests a checkout --depth X was used which may prevent pushing the commit"
echo "it happens when actions/checkout is called without depth: 0"
fi
git push $url/forgejo-experimental/forgejo ${{ steps.release-info.outputs.sha }}:refs/tags/$tag
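As a worked example of the `version from ref` step in this workflow, here is the same parameter expansion applied to representative refs (the tag and branch names are hypothetical):

# refs/tags/v7.0.4        -> version=7.0.4, override=false
# refs/heads/forgejo      -> latest merged non "-test" tag, e.g. v7.0 -> 7.0-test, override=true
# refs/heads/v7.0/forgejo -> derived below, override=true
ref=refs/heads/v7.0/forgejo
version=${ref#refs/heads/}        # v7.0/forgejo
version=${version%/forgejo}-test  # v7.0-test
version=${version#v}              # 7.0-test
echo "$version"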

@@ -0,0 +1,57 @@
# Copyright 2024 The Forgejo Authors
# SPDX-License-Identifier: MIT
#
# To modify this workflow:
#
# - push it to the wip-ci-end-to-end branch on the forgejo repository
# otherwise it will not have access to the secrets required to push
# the cascading PR
#
# - once it works, open a pull request for the sake of keeping track
# of the change even if the PR won't run it because it will use
# whatever is in the default branch instead
#
# - after it is merged, double check it works by setting the
# run-end-to-end-tests label on a pull request (any pull request will do)
#
name: issue-labels
on:
push:
branches:
- 'wip-ci-end-to-end'
pull_request_target:
types:
- labeled
jobs:
cascade:
if: >
vars.ROLE == 'forgejo-coding' && (
github.event_name == 'push' ||
(
github.event.action == 'label_updated' && github.event.label.name == 'run-end-to-end-tests'
)
)
runs-on: docker
container:
image: data.forgejo.org/oci/node:20-bookworm
steps:
- uses: actions/checkout@v4
with:
fetch-depth: '0'
show-progress: 'false'
- uses: https://code.forgejo.org/actions/cascading-pr@v2.2.0
with:
origin-url: ${{ env.GITHUB_SERVER_URL }}
origin-repo: ${{ github.repository }}
origin-token: ${{ secrets.END_TO_END_CASCADING_PR_ORIGIN }}
origin-pr: ${{ github.event.pull_request.number }}
origin-ref: ${{ github.event_name == 'push' && github.event.ref || '' }}
destination-url: https://code.forgejo.org
destination-fork-repo: cascading-pr/end-to-end
destination-repo: forgejo/end-to-end
destination-branch: main
destination-token: ${{ secrets.END_TO_END_CASCADING_PR_DESTINATION }}
close-merge: true
update: .forgejo/cascading-pr-end-to-end

@@ -0,0 +1,39 @@
on:
workflow_dispatch:
schedule:
- cron: '@daily'
jobs:
integration-cleanup:
if: vars.ROLE == 'forgejo-integration'
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- name: apt install curl jq
run: |
export DEBIAN_FRONTEND=noninteractive
apt-get update -qq
apt-get -q install -qq -y curl jq
- name: remove old releases and tags
run: |
url=https://any:${{ secrets.TOKEN }}@codeberg.org
curl -sS "$url/api/v1/repos/forgejo-integration/forgejo/releases" | jq -r '.[] | "\(.published_at) \(.tag_name)"' | sort | while read published_at version ; do
if echo $version | grep -e '-test$' >/dev/null; then
old="18 months"
else
old="1 day"
fi
too_old=$(env -i date --date="- $old" +%F)
too_old_seconds=$(env -i date --date="- $old" +%s)
published_at_seconds=$(env -i date --date="$published_at" +%s)
if test $published_at_seconds -le $too_old_seconds ; then
echo "$version was published more than $old ago ($published_at <= $too_old) and will be removed"
curl -X DELETE -sS "$url/api/v1/repos/forgejo-integration/forgejo/releases/tags/$version"
else
echo "$version was published less than $old ago"
fi
done

@@ -0,0 +1,45 @@
# Copyright 2024 The Forgejo Authors
# SPDX-License-Identifier: MIT
name: requirements
on:
pull_request:
types:
- labeled
- edited
- opened
- synchronize
jobs:
merge-conditions:
if: vars.ROLE == 'forgejo-coding'
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- name: Debug output
run: |
cat <<'EOF'
${{ toJSON(github) }}
EOF
- name: Missing test label
if: >
!(
contains(toJSON(github.event.pull_request.labels), 'test/present')
|| contains(toJSON(github.event.pull_request.labels), 'test/not-needed')
|| contains(toJSON(github.event.pull_request.labels), 'test/manual')
)
run: |
echo "Test label must be set to either 'present', 'not-needed' or 'manual'."
exit 1
- name: Missing manual test instructions
if: >
(
contains(toJSON(github.event.pull_request.labels), 'test/manual')
&& !contains(toJSON(github.event.pull_request.body), '# Test')
)
run: |
echo "Manual test label is set. The PR description needs to contain test steps introduced by a heading like:"
echo "# Testing"
exit 1

@@ -0,0 +1,24 @@
# Copyright 2024 The Forgejo Authors
# SPDX-License-Identifier: MIT
#
name: milestone
on:
pull_request_target:
types:
- closed
jobs:
set:
if: vars.ROLE == 'forgejo-coding' && github.event.pull_request.merged
runs-on: docker
container:
image: 'data.forgejo.org/oci/ci:1'
steps:
- uses: https://data.forgejo.org/forgejo/set-milestone@v1.0.0
with:
forgejo: https://codeberg.org
repository: forgejo/forgejo
token: ${{ secrets.SET_MILESTONE_TOKEN }}
pr-number: ${{ github.event.pull_request.number }}
verbose: ${{ vars.SET_MILESTONE_VERBOSE }}

@@ -0,0 +1,27 @@
name: mirror
on:
workflow_dispatch:
schedule:
- cron: '@daily'
jobs:
mirror:
if: ${{ secrets.MIRROR_TOKEN != '' }}
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- name: git push {v*/,}forgejo
run: |
git init --bare .
git remote add origin ${{ env.GITHUB_SERVER_URL }}/${{ env.GITHUB_REPOSITORY }}
git fetch origin refs/heads/forgejo:refs/mirror/forgejo
git ls-remote origin refs/heads/v*/forgejo | while read sha full_ref ; do
ref=${full_ref#refs/heads/}
echo git fetch origin $full_ref:refs/mirror/$ref
git fetch origin $full_ref:refs/mirror/$ref
done
echo git push --force https://${{ vars.MIRROR_DESTINATION }} refs/mirror/*:refs/heads/*
git push --force https://any:${{ secrets.MIRROR_TOKEN }}@${{ vars.MIRROR_DESTINATION }} refs/mirror/*:refs/heads/*

@@ -0,0 +1,86 @@
# SPDX-License-Identifier: MIT
#
# See also https://forgejo.org/docs/next/contributor/release/#stable-release-process
#
# https://codeberg.org/forgejo-experimental/forgejo
#
# Copies a release from codeberg.org/forgejo-integration to codeberg.org/forgejo-experimental
#
# vars.ROLE: forgejo-experimental
# vars.FORGEJO: https://codeberg.org
# vars.FROM_OWNER: forgejo-integration
# vars.TO_OWNER: forgejo-experimental
# vars.REPO: forgejo
# vars.DOER: forgejo-experimental-ci
# secrets.TOKEN: <generated from codeberg.org/forgejo-experimental-ci>
#
# http://private.forgejo.org/forgejo/forgejo
#
# Copies & sign a release from codeberg.org/forgejo-integration to codeberg.org/forgejo
#
# vars.ROLE: forgejo-release
# vars.FORGEJO: https://codeberg.org
# vars.FROM_OWNER: forgejo-integration
# vars.TO_OWNER: forgejo
# vars.REPO: forgejo
# vars.DOER: release-team
# secrets.TOKEN: <generated from codeberg.org/release-team>
# secrets.GPG_PRIVATE_KEY: <XYZ>
# secrets.GPG_PASSPHRASE: <ABC>
#
name: Publish release
on:
push:
tags: 'v*'
jobs:
publish:
runs-on: lxc-bookworm
if: vars.DOER != '' && vars.FORGEJO != '' && vars.TO_OWNER != '' && vars.FROM_OWNER != '' && secrets.TOKEN != ''
steps:
- uses: actions/checkout@v4
- name: copy & sign
uses: https://data.forgejo.org/forgejo/forgejo-build-publish/publish@v5.3.1
with:
from-forgejo: ${{ vars.FORGEJO }}
to-forgejo: ${{ vars.FORGEJO }}
from-owner: ${{ vars.FROM_OWNER }}
to-owner: ${{ vars.TO_OWNER }}
repo: ${{ vars.REPO }}
release-notes: "See https://codeberg.org/forgejo/forgejo/src/branch/forgejo/release-notes-published/{VERSION}.md"
ref-name: ${{ github.ref_name }}
sha: ${{ github.sha }}
from-token: ${{ secrets.TOKEN }}
to-doer: ${{ vars.DOER }}
to-token: ${{ secrets.TOKEN }}
gpg-private-key: ${{ secrets.GPG_PRIVATE_KEY }}
gpg-passphrase: ${{ secrets.GPG_PASSPHRASE }}
verbose: ${{ vars.VERBOSE }}
- name: get trigger mirror issue
id: mirror
uses: https://data.forgejo.org/infrastructure/issue-action/get@v1.1.0
with:
forgejo: https://code.forgejo.org
repository: forgejo/forgejo
labels: mirror-trigger
- name: trigger the mirror
uses: https://data.forgejo.org/infrastructure/issue-action/set@v1.1.0
with:
forgejo: https://code.forgejo.org
repository: forgejo/forgejo
token: ${{ secrets.LABEL_ISSUE_FORGEJO_MIRROR_TOKEN }}
numbers: ${{ steps.mirror.outputs.numbers }}
label-wait-if-exists: 3600
label: trigger
- name: upgrade v*.next.forgejo.org
uses: https://data.forgejo.org/infrastructure/next-digest@v1.1.0
with:
url: https://placeholder:${{ secrets.TOKEN_NEXT_DIGEST }}@code.forgejo.org/infrastructure/next-digest
ref_name: '${{ github.ref_name }}'
image: 'codeberg.org/forgejo-experimental/forgejo'
tag_suffix: '-rootless'

@@ -0,0 +1,33 @@
on:
workflow_dispatch:
schedule:
- cron: '@daily'
jobs:
release-notes:
if: vars.ROLE == 'forgejo-coding'
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: https://data.forgejo.org/actions/setup-go@v5
with:
go-version-file: "go.mod"
cache: false
- name: apt install jq
run: |
export DEBIAN_FRONTEND=noninteractive
apt-get update -qq
apt-get -q install -y -qq jq
- name: update open milestones
run: |
set -x
curl -sS $GITHUB_SERVER_URL/api/v1/repos/$GITHUB_REPOSITORY/milestones?state=open | jq -r '.[] | .title' | while read forgejo version ; do
milestone="$forgejo $version"
go run code.forgejo.org/forgejo/release-notes-assistant@v1.1.1 --config .release-notes-assistant.yaml --storage milestone --storage-location "$milestone" --forgejo-url $GITHUB_SERVER_URL --repository $GITHUB_REPOSITORY --token ${{ secrets.RELEASE_NOTES_ASSISTANT_TOKEN }} release $version
done

@@ -0,0 +1,41 @@
name: issue-labels
on:
pull_request_target:
types:
- edited
- synchronize
- labeled
jobs:
release-notes:
if: ( vars.ROLE == 'forgejo-coding' ) && contains(github.event.pull_request.labels.*.name, 'worth a release-note')
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- name: event
run: |
cat <<'EOF'
${{ toJSON(github.event.pull_request.labels.*.name) }}
EOF
cat <<'EOF'
${{ toJSON(github.event) }}
EOF
- uses: https://data.forgejo.org/actions/setup-go@v5
with:
go-version-file: "go.mod"
cache: false
- name: apt install jq
run: |
export DEBIAN_FRONTEND=noninteractive
apt-get update -qq
apt-get -q install -y -qq jq
- name: release-notes-assistant preview
run: |
go run code.forgejo.org/forgejo/release-notes-assistant@v1.1.1 --config .release-notes-assistant.yaml --storage pr --storage-location ${{ github.event.pull_request.number }} --forgejo-url $GITHUB_SERVER_URL --repository $GITHUB_REPOSITORY --token ${{ secrets.RELEASE_NOTES_ASSISTANT_TOKEN }} preview ${{ github.event.pull_request.number }}

@@ -0,0 +1,71 @@
#
# Runs every 2 hours, but Renovate is limited to creating new PRs before 4am.
# See renovate.json for more settings.
# Automerge is enabled for Renovate PRs, but they need to be approved first.
#
name: renovate
on:
push:
branches:
- renovate/** # self-test updates
paths:
- .forgejo/workflows/renovate.yml
schedule:
- cron: '0 0/2 * * *'
workflow_dispatch:
env:
RENOVATE_DRY_RUN: ${{ (github.event_name != 'schedule' && github.ref_name != github.event.repository.default_branch) && 'full' || '' }}
RENOVATE_REPOSITORIES: ${{ github.repository }}
jobs:
renovate:
if: vars.ROLE == 'forgejo-coding' && secrets.RENOVATE_TOKEN != ''
runs-on: docker
container:
image: data.forgejo.org/forgejo-contrib/renovate:39.69.2
steps:
- name: Load renovate repo cache
uses: https://data.forgejo.org/actions/cache/restore@3624ceb22c1c5a301c8db4169662070a689d9ea8 # v4.1.1
with:
path: |
.tmp/cache/renovate/repository
.tmp/cache/renovate/renovate-cache-sqlite
.tmp/osv
key: repo-cache-${{ github.run_id }}
restore-keys: |
repo-cache-
- name: Run renovate
run: renovate
env:
GITHUB_COM_TOKEN: ${{ secrets.RENOVATE_GITHUB_COM_TOKEN }}
LOG_LEVEL: debug
RENOVATE_BASE_DIR: ${{ github.workspace }}/.tmp
RENOVATE_ENDPOINT: ${{ github.server_url }}
RENOVATE_PLATFORM: gitea
RENOVATE_REPOSITORY_CACHE: 'enabled'
RENOVATE_TOKEN: ${{ secrets.RENOVATE_TOKEN }}
RENOVATE_GIT_AUTHOR: 'Renovate Bot <forgejo-renovate-action@forgejo.org>'
RENOVATE_X_SQLITE_PACKAGE_CACHE: true
GIT_AUTHOR_NAME: 'Renovate Bot'
GIT_AUTHOR_EMAIL: 'forgejo-renovate-action@forgejo.org'
GIT_COMMITTER_NAME: 'Renovate Bot'
GIT_COMMITTER_EMAIL: 'forgejo-renovate-action@forgejo.org'
OSV_OFFLINE_ROOT_DIR: ${{ github.workspace }}/.tmp/osv
- name: Save renovate repo cache
if: always() && env.RENOVATE_DRY_RUN != 'full'
uses: https://data.forgejo.org/actions/cache/save@3624ceb22c1c5a301c8db4169662070a689d9ea8 # v4.1.1
with:
path: |
.tmp/cache/renovate/repository
.tmp/cache/renovate/renovate-cache-sqlite
.tmp/osv
key: repo-cache-${{ github.run_id }}

@@ -0,0 +1,287 @@
name: testing
on:
pull_request:
push:
branches:
- 'forgejo*'
- 'v*/forgejo*'
workflow_dispatch:
jobs:
backend-checks:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
steps:
- name: event info
run: |
cat <<'EOF'
${{ toJSON(github) }}
EOF
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- run: su forgejo -c 'make deps-backend deps-tools'
- run: su forgejo -c 'make --always-make -j$(nproc) lint-backend tidy-check swagger-check fmt-check swagger-validate' # ensure the "go-licenses" make target runs
- uses: ./.forgejo/workflows-composite/build-backend
frontend-checks:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- run: make deps-frontend
- run: make lint-frontend
- run: make checks-frontend
- run: make test-frontend-coverage
- run: make frontend
- name: Install zstd for cache saving
# works around https://github.com/actions/cache/issues/1169, because the
# consuming job has zstd and doesn't restore the cache otherwise
run: |
apt-get update -qq
apt-get -q install -qq -y zstd
- name: "Cache frontend build for playwright testing"
uses: actions/cache/save@v4
with:
path: ${{github.workspace}}/public/assets
key: frontend-build-${{ github.sha }}
test-unit:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks]
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
services:
elasticsearch:
image: data.forgejo.org/oci/bitnami/elasticsearch:7
options: --tmpfs /bitnami/elasticsearch/data
env:
discovery.type: single-node
ES_JAVA_OPTS: "-Xms512m -Xmx512m"
minio:
image: data.forgejo.org/oci/bitnami/minio:2024.8.17
options: >-
--hostname gitea.minio --tmpfs /bitnami/minio/data:noatime
env:
MINIO_DOMAIN: minio
MINIO_ROOT_USER: 123456
MINIO_ROOT_PASSWORD: 12345678
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- name: install git >= 2.42
uses: ./.forgejo/workflows-composite/apt-install-from
with:
packages: git
- name: test release-notes-assistant.sh
run: |
apt-get -q install -qq -y jq
./release-notes-assistant.sh test_main
- uses: ./.forgejo/workflows-composite/build-backend
- run: |
su forgejo -c 'make test-backend test-check'
timeout-minutes: 120
env:
RACE_ENABLED: 'true'
TAGS: bindata
TEST_ELASTICSEARCH_URL: http://elasticsearch:9200
test-e2e:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks]
container:
image: 'data.forgejo.org/oci/playwright:latest'
options: --tmpfs /tmp:exec,noatime
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
with:
fetch-depth: 20
- uses: ./.forgejo/workflows-composite/setup-env
- name: "Restore frontend build"
uses: actions/cache/restore@v4
id: cache-frontend
with:
path: ${{github.workspace}}/public/assets
key: frontend-build-${{ github.sha }}
- name: "Build frontend (if not cached)"
if: steps.cache-frontend.outputs.cache-hit != 'true'
run: |
su forgejo -c 'make deps-frontend frontend'
- uses: ./.forgejo/workflows-composite/build-backend
- name: Get changed files
id: changed-files
uses: https://data.forgejo.org/tj-actions/changed-files@v45
with:
separator: '\n'
- run: |
su forgejo -c 'make generate test-e2e-sqlite'
timeout-minutes: 120
env:
USE_REPO_TEST_DIR: 1
PLAYWRIGHT_SKIP_BROWSER_DOWNLOAD: 1
CHANGED_FILES: ${{steps.changed-files.outputs.all_changed_files}}
- name: Upload test artifacts on failure
if: failure()
uses: https://data.forgejo.org/forgejo/upload-artifact@v4
with:
name: test-artifacts.zip
path: tests/e2e/test-artifacts/
retention-days: 3
test-remote-cacher:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks, test-unit]
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
name: ${{ format('test-remote-cacher ({0})', matrix.cacher.name) }}
strategy:
matrix:
cacher:
- name: redis
image: data.forgejo.org/oci/bitnami/redis:7.2
options: --tmpfs /bitnami/redis/data:noatime
- name: redict
image: registry.redict.io/redict:7.3.0-scratch
options: --tmpfs /data:noatime
- name: valkey
image: data.forgejo.org/oci/bitnami/valkey:7.2
options: --tmpfs /bitnami/redis/data:noatime
- name: garnet
image: ghcr.io/microsoft/garnet-alpine:1.0.14
options: --tmpfs /data:noatime
services:
cacher:
image: ${{ matrix.cacher.image }}
options: ${{ matrix.cacher.options }}
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- name: install git >= 2.42
uses: ./.forgejo/workflows-composite/apt-install-from
with:
packages: git
- uses: ./.forgejo/workflows-composite/build-backend
- run: |
su forgejo -c 'make test-remote-cacher test-check'
timeout-minutes: 120
env:
RACE_ENABLED: 'true'
TAGS: bindata
TEST_REDIS_SERVER: cacher:${{ matrix.cacher.port }}
test-mysql:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks]
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
services:
mysql:
image: 'data.forgejo.org/oci/bitnami/mysql:8.4'
env:
ALLOW_EMPTY_PASSWORD: yes
MYSQL_DATABASE: testgitea
#
# See also https://codeberg.org/forgejo/forgejo/issues/976
#
MYSQL_EXTRA_FLAGS: --innodb-adaptive-flushing=OFF --innodb-buffer-pool-size=4G --innodb-log-buffer-size=128M --innodb-flush-log-at-trx-commit=0 --innodb-flush-log-at-timeout=30 --innodb-flush-method=nosync --innodb-fsync-threshold=1000000000 --disable-log-bin
options: --tmpfs /bitnami/mysql/data:noatime
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- name: install dependencies & git >= 2.42
uses: ./.forgejo/workflows-composite/apt-install-from
with:
packages: git git-lfs
- uses: ./.forgejo/workflows-composite/build-backend
- run: |
su forgejo -c 'make test-mysql-migration test-mysql'
timeout-minutes: 120
env:
USE_REPO_TEST_DIR: 1
test-pgsql:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks]
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
services:
minio:
image: data.forgejo.org/oci/bitnami/minio:2024.8.17
env:
MINIO_ROOT_USER: 123456
MINIO_ROOT_PASSWORD: 12345678
options: --tmpfs /bitnami/minio/data
ldap:
image: data.forgejo.org/oci/test-openldap:latest
pgsql:
image: data.forgejo.org/oci/bitnami/postgresql:15
env:
POSTGRESQL_DATABASE: test
POSTGRESQL_PASSWORD: postgres
POSTGRESQL_FSYNC: off
POSTGRESQL_EXTRA_FLAGS: -c full_page_writes=off
options: --tmpfs /bitnami/postgresql
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- name: install dependencies & git >= 2.42
uses: ./.forgejo/workflows-composite/apt-install-from
with:
packages: git git-lfs
- uses: ./.forgejo/workflows-composite/build-backend
- run: |
su forgejo -c 'make test-pgsql-migration test-pgsql'
timeout-minutes: 120
env:
RACE_ENABLED: true
USE_REPO_TEST_DIR: 1
TEST_LDAP: 1
test-sqlite:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs: [backend-checks, frontend-checks]
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- name: install dependencies & git >= 2.42
uses: ./.forgejo/workflows-composite/apt-install-from
with:
packages: git git-lfs
- uses: ./.forgejo/workflows-composite/build-backend
- run: |
su forgejo -c 'make test-sqlite-migration test-sqlite'
timeout-minutes: 120
env:
TAGS: sqlite sqlite_unlock_notify
RACE_ENABLED: true
TEST_TAGS: sqlite sqlite_unlock_notify
USE_REPO_TEST_DIR: 1
security-check:
if: vars.ROLE == 'forgejo-coding' || vars.ROLE == 'forgejo-testing'
runs-on: docker
needs:
- test-sqlite
- test-pgsql
- test-mysql
- test-remote-cacher
- test-unit
container:
image: 'data.forgejo.org/oci/node:20-bookworm'
options: --tmpfs /tmp:exec,noatime
steps:
- uses: https://data.forgejo.org/actions/checkout@v4
- uses: ./.forgejo/workflows-composite/setup-env
- run: su forgejo -c 'make deps-backend deps-tools'
- run: su forgejo -c 'make security-check'
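The database and cacher jobs above all follow the same recipe: check out the tree, build the backend, then drive a make target against a disposable service container. As a rough local approximation of the simplest of them, test-sqlite, one could run something like the following on a checkout (a sketch only: the make targets and environment variables are copied from the job above, a local Go and Node toolchain is assumed in place of the node:20-bookworm container, and "make backend" is an assumed stand-in for the build-backend composite action, whose steps are not shown here):

# Sketch: approximate the test-sqlite job on a local checkout.
make deps-backend deps-tools            # dependency step, as in the security-check job
make backend                            # assumed stand-in for the build-backend composite
TAGS="sqlite sqlite_unlock_notify" \
TEST_TAGS="sqlite sqlite_unlock_notify" \
RACE_ENABLED=true \
USE_REPO_TEST_DIR=1 \
make test-sqlite-migration test-sqlite  # same targets and variables as the job above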

11
.gitattributes vendored Normal file

@ -0,0 +1,11 @@
* text=auto eol=lf
*.tmpl linguist-language=go-html-template
*.pb.go linguist-generated
/assets/*.json linguist-generated
/public/assets/img/svg/*.svg linguist-generated
/templates/swagger/v1_json.tmpl linguist-generated
/vendor/** -text -eol linguist-vendored
/web_src/fomantic/build/** linguist-generated
/web_src/fomantic/_site/globals/site.variables linguist-language=Less
/web_src/js/vendor/** -text -eol linguist-vendored
Dockerfile.* linguist-language=Dockerfile

128
.gitignore vendored Normal file

@ -0,0 +1,128 @@
# Emacs
*~
# Compiled Object files, Static and Dynamic libs (Shared Objects)
*.o
*.a
*.so
# Folders
_obj
_test
# IntelliJ
.idea
# GoLand's output filename cannot be set manually
/go_build_*
/gitea_*
# MS VSCode
.vscode
__debug_bin*
*.cgo1.go
*.cgo2.c
_cgo_defun.c
_cgo_gotypes.go
_cgo_export.*
_testmain.go
# folder for project related local files
/local/
*.exe
*.test
*.prof
*coverage.out
coverage.all
cpu.out
/modules/migration/bindata.go
/modules/migration/bindata.go.hash
/modules/options/bindata.go
/modules/options/bindata.go.hash
/modules/public/bindata.go
/modules/public/bindata.go.hash
/modules/templates/bindata.go
/modules/templates/bindata.go.hash
*.db
*.log
*.log.*.gz
/gitea
/gitea-vet
/debug
/integrations.test
/bin
/dist
/custom/*
!/custom/conf/app.example.ini
/data
/indexers
/log
/public/assets/img/avatar
/tests/integration/gitea-integration-*
/tests/integration/indexers-*
/tests/e2e/gitea-e2e-*
/tests/e2e/indexers-*
/tests/e2e/reports
/tests/e2e/test-artifacts
/tests/e2e/test-snapshots
/tests/*.ini
/tests/**/*.git/**/*.sample
/node_modules
/.venv
/yarn.lock
/yarn-error.log
/npm-debug.log*
/public/assets/js
/public/assets/css
/public/assets/fonts
/public/assets/licenses.txt
/vendor
/web_src/fomantic/node_modules
/web_src/fomantic/build/*
!/web_src/fomantic/build/semantic.js
!/web_src/fomantic/build/semantic.css
!/web_src/fomantic/build/themes
/web_src/fomantic/build/themes/*
!/web_src/fomantic/build/themes/default
/web_src/fomantic/build/themes/default/assets/*
!/web_src/fomantic/build/themes/default/assets/fonts
/web_src/fomantic/build/themes/default/assets/fonts/*
!/web_src/fomantic/build/themes/default/assets/fonts/icons.woff2
!/web_src/fomantic/build/themes/default/assets/fonts/outline-icons.woff2
/VERSION
/.air
/.go-licenses
/.cur-deadcode-out
# Files and folders that were previously generated
/public/assets/img/webpack
# Snapcraft
/gitea_a*.txt
snap/.snapcraft/
parts/
stage/
prime/
*.snap
*.snap-build
*_source.tar.bz2
.DS_Store
# Direnv configuration
/.envrc
# nix-direnv generated files
.direnv/
# Make evidence files
/.make_evidence
# Manpage
/man

0
.gitmodules vendored Normal file

51
.gitpod.yml Normal file

@ -0,0 +1,51 @@
tasks:
- name: Setup
init: |
cp -r contrib/ide/vscode .vscode
make deps
make build
command: |
gp sync-done setup
exit 0
- name: Run backend
command: |
gp sync-await setup
# Get the URL and extract the domain
url=$(gp url 3000)
domain=$(echo $url | awk -F[/:] '{print $4}')
if [ -f custom/conf/app.ini ]; then
sed -i "s|^ROOT_URL =.*|ROOT_URL = ${url}/|" custom/conf/app.ini
sed -i "s|^DOMAIN =.*|DOMAIN = ${domain}|" custom/conf/app.ini
sed -i "s|^SSH_DOMAIN =.*|SSH_DOMAIN = ${domain}|" custom/conf/app.ini
sed -i "s|^NO_REPLY_ADDRESS =.*|SSH_DOMAIN = noreply.${domain}|" custom/conf/app.ini
else
mkdir -p custom/conf/
echo -e "[server]\nROOT_URL = ${url}/" > custom/conf/app.ini
echo -e "\n[database]\nDB_TYPE = sqlite3\nPATH = $GITPOD_REPO_ROOT/data/gitea.db" >> custom/conf/app.ini
fi
export TAGS="sqlite sqlite_unlock_notify"
make watch-backend
- name: Run frontend
command: |
gp sync-await setup
make watch-frontend
openMode: split-right
vscode:
extensions:
- editorconfig.editorconfig
- dbaeumer.vscode-eslint
- golang.go
- stylelint.vscode-stylelint
- DavidAnson.vscode-markdownlint
- Vue.volar
- ms-azuretools.vscode-docker
- vitest.explorer
- cweijan.vscode-database-client2
- GitHub.vscode-pull-request-github
ports:
- name: Gitea
port: 3000
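For orientation: on a fresh workspace the Run backend task above takes the else branch and generates a minimal custom/conf/app.ini along these lines (a sketch; the workspace URL and repository root are hypothetical placeholders for whatever gp url 3000 and $GITPOD_REPO_ROOT resolve to):

[server]
ROOT_URL = https://3000-example-workspace.gitpod.io/

[database]
DB_TYPE = sqlite3
PATH = /workspace/forgejo/data/gitea.db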

150
.golangci.yml Normal file

@ -0,0 +1,150 @@
linters:
enable-all: false
disable-all: true
fast: false
enable:
- bidichk
- depguard
- dupl
- errcheck
- forbidigo
- gocritic
- gofmt
- gofumpt
- gosimple
- govet
- ineffassign
- nakedret
- nolintlint
- revive
- staticcheck
- stylecheck
- tenv
- testifylint
- typecheck
- unconvert
- unused
- unparam
- wastedassign
run:
timeout: 10m
output:
sort-results: true
sort-order: [file]
show-stats: true
linters-settings:
stylecheck:
checks: ["all", "-ST1005", "-ST1003"]
nakedret:
max-func-lines: 0
gocritic:
disabled-checks:
- ifElseChain
revive:
severity: error
rules:
- name: atomic
- name: bare-return
- name: blank-imports
- name: constant-logical-expr
- name: context-as-argument
- name: context-keys-type
- name: dot-imports
- name: duplicated-imports
- name: empty-lines
- name: error-naming
- name: error-return
- name: error-strings
- name: errorf
- name: exported
- name: identical-branches
- name: if-return
- name: increment-decrement
- name: indent-error-flow
- name: modifies-value-receiver
- name: package-comments
- name: range
- name: receiver-naming
- name: redefines-builtin-id
- name: string-of-int
- name: superfluous-else
- name: time-naming
- name: unconditional-recursion
- name: unexported-return
- name: unreachable-code
- name: var-declaration
- name: var-naming
- name: redefines-builtin-id
disabled: true
gofumpt:
extra-rules: true
depguard:
rules:
main:
deny:
- pkg: encoding/json
desc: use gitea's modules/json instead of encoding/json
- pkg: github.com/unknwon/com
desc: use gitea's util and replacements
- pkg: io/ioutil
desc: use os or io instead
- pkg: golang.org/x/exp
desc: it's experimental and unreliable
- pkg: code.gitea.io/gitea/modules/git/internal
desc: do not use the internal package, use AddXxx function instead
- pkg: gopkg.in/ini.v1
desc: do not use the ini package, use gitea's config system instead
- pkg: github.com/minio/sha256-simd
desc: use crypto/sha256 instead, see https://codeberg.org/forgejo/forgejo/pulls/1528
testifylint:
disable:
- go-require
issues:
max-issues-per-linter: 0
max-same-issues: 0
exclude-dirs: [node_modules, public, web_src]
exclude-case-sensitive: true
exclude-rules:
- path: models/db/sql_postgres_with_schema.go
linters:
- nolintlint
- path: _test\.go
linters:
- gocyclo
- errcheck
- dupl
- gosec
- unparam
- staticcheck
- path: models/migrations/v
linters:
- gocyclo
- errcheck
- dupl
- gosec
- path: cmd
linters:
- forbidigo
- text: "webhook"
linters:
- dupl
- text: "`ID' should not be capitalized"
linters:
- gocritic
- text: "swagger"
linters:
- unused
- deadcode
- text: "argument x is overwritten before first use"
linters:
- staticcheck
- text: "commentFormatting: put a space between `//` and comment text"
linters:
- gocritic
- text: "exitAfterDefer:"
linters:
- gocritic
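This configuration is read automatically by golangci-lint when it is run from the repository root, so the linters configured above can be reproduced locally with a single command (assuming golangci-lint is installed; the Makefile presumably provides a wrapper target, but that wrapper is not shown here):

# Sketch: run the linters configured above over the whole module.
golangci-lint run ./...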

11
.ignore Normal file

@ -0,0 +1,11 @@
*.min.css
*.min.js
/assets/*.json
/modules/options/bindata.go
/modules/public/bindata.go
/modules/templates/bindata.go
/options/gitignore
/options/license
/public/assets
/vendor
node_modules

2
.mailmap Normal file

@ -0,0 +1,2 @@
Unknwon <u@gogs.io> <joe2010xtmf@163.com>
Unknwon <u@gogs.io> 无闻 <u@gogs.io>

17
.markdownlint.yaml Normal file

@ -0,0 +1,17 @@
commands-show-output: false
fenced-code-language: false
first-line-h1: false
heading-increment: false
line-length: {code_blocks: false, tables: false, stern: true, line_length: -1}
no-alt-text: false
no-bare-urls: false
no-blanks-blockquote: false
no-emphasis-as-heading: false
no-empty-links: false
no-hard-tabs: {code_blocks: false}
no-inline-html: false
no-space-in-code: false
no-space-in-emphasis: false
no-trailing-punctuation: false
no-trailing-spaces: {br_spaces: 0}
single-h1: false

6
.npmrc Normal file

@ -0,0 +1,6 @@
audit=false
fund=false
update-notifier=false
package-lock=true
save-exact=true
lockfile-version=3

27
.release-notes-assistant.yaml Normal file

@ -0,0 +1,27 @@
categorize: './release-notes-assistant.sh'
branch-development: 'forgejo'
branch-pattern: 'v*/forgejo'
branch-find-version: 'v(?P<version>\d+\.\d+)/forgejo'
branch-to-version: '${version}.0'
branch-from-version: 'v%[1]d.%[2]d/forgejo'
tag-from-version: 'v%[1]d.%[2]d.%[3]d'
branch-known:
- 'v7.0/forgejo'
cleanup-line: 'sed -Ee "s/^(feat|fix):\s*//g" -e "s/^\[WIP\] //" -e "s/^WIP: //" -e "s;\[(UI|BUG|FEAT|v.*?/forgejo)\]\s*;;g"'
render-header: |
## Release notes
comment: |
<details>
<summary>Where does that come from?</summary>
The following is a preview of the release notes for this pull request, as they will appear in the upcoming release. They are derived from the content of the `%[2]s/%[3]s.md` file, if it exists, or the title of the pull request. They were also added at the bottom of the description of this pull request for easier reference.
This message and the release notes originate from a call to the [release-notes-assistant](https://code.forgejo.org/forgejo/release-notes-assistant).
```diff
%[4]s
```
</details>
%[1]s

12
.spectral.yaml Normal file

@ -0,0 +1,12 @@
extends: [[spectral:oas, all]]
rules:
info-contact: off
oas2-api-host: off
oas2-parameter-description: off
oas2-schema: off
oas2-valid-schema-example: off
openapi-tags: off
operation-description: off
operation-singular-tag: off
operation-tag-defined: off

44
.yamllint.yaml Normal file

@ -0,0 +1,44 @@
extends: default
rules:
braces:
min-spaces-inside: 0
max-spaces-inside: 1
min-spaces-inside-empty: 0
max-spaces-inside-empty: 0
brackets:
min-spaces-inside: 0
max-spaces-inside: 1
min-spaces-inside-empty: 0
max-spaces-inside-empty: 0
comments:
require-starting-space: true
ignore-shebangs: true
min-spaces-from-content: 1
comments-indentation:
level: error
document-start:
level: error
present: false
document-end:
present: false
empty-lines:
max: 1
indentation:
spaces: 2
line-length: disable
truthy:
allowed-values: ["true", "false", "on", "off"]
ignore: |
.venv
node_modules

58
BSDmakefile Normal file

@ -0,0 +1,58 @@
# GNU makefile proxy script for BSD make
#
# Written and maintained by Mahmoud Al-Qudsi <mqudsi@neosmart.net>
# Copyright NeoSmart Technologies <https://neosmart.net/> 2014-2019
# Obtain updates from <https://github.com/neosmart/gmake-proxy>
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice, this
# list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright notice,
# this list of conditions and the following disclaimer in the documentation
# and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS IS"
# AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE
# IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE
# DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR CONTRIBUTORS BE LIABLE
# FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL
# DAMAGES (INCLUDING, BUT NOT LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR
# SERVICES; LOSS OF USE, DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER
# CAUSED AND ON ANY THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY,
# OR TORT (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
JARG =
GMAKE = "gmake"
# When gmake is called from another make instance, -w is automatically added
# which causes extraneous messages about directory changes to be emitted.
# Running with --no-print-directory silences these messages.
GARGS = "--no-print-directory"
.if "$(.MAKE.JOBS)" != ""
JARG = -j$(.MAKE.JOBS)
.endif
# bmake prefers out-of-source builds and tries to cd into ./obj (among others)
# where possible. GNU Make doesn't, so override that value.
.OBJDIR: ./
# The GNU convention is to use the lowercased `prefix` variable/macro to
# specify the installation directory. Humor them.
GPREFIX =
.if defined(PREFIX) && ! defined(prefix)
GPREFIX = 'prefix = "$(PREFIX)"'
.endif
.BEGIN: .SILENT
which $(GMAKE) || (printf "Error: GNU Make is required!\n\n" 1>&2 && false)
.PHONY: FRC
$(.TARGETS): FRC
$(GMAKE) $(GPREFIX) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)
.DONE .DEFAULT: .SILENT
$(GMAKE) $(GPREFIX) $(GARGS) $(.TARGETS:S,.DONE,,) $(JARG)
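In effect, a BSD make invocation such as bmake -j4 frontend (the frontend target appears in the workflow earlier in this commit) is forwarded to GNU Make roughly as follows; this is a sketch of how the proxy rule above expands, not additional build logic:

# Expansion of the proxy rule for: bmake -j4 frontend
# (GPREFIX stays empty unless PREFIX was given on the command line)
gmake --no-print-directory frontend -j4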

40
CODEOWNERS Normal file

@ -0,0 +1,40 @@
# This file describes the expected reviewers for a PR based on the changed
# files. Unlike what the name of the file suggests, they don't own the code, but
# merely have a good understanding of that area of the codebase and therefore
# are usually suited as a reviewer.
# Please mind the alphabetic order of reviewers.
# Files related to frontend development.
# JavaScript and CSS code.
web_src/.* @caesar @crystal @gusted
# HTML templates used by the backend.
templates/.* @caesar @crystal @gusted
## the issue sidebar was touched by fnetx
templates/repo/issue/view_content/sidebar.* @fnetx
# Playwright tests
tests/e2e/.* @fnetx
# Files related to Go development.
# The modules usually don't require much knowledge about Forgejo and could
# be reviewed by Go developers.
modules/.* @gusted
# Models has code related to SQL queries, general database knowledge and XORM.
models/.* @gusted
# The routers directory contains the largest amount of code that requires a good grasp
# of how Forgejo comes together. It's tedious to write good integration tests
# for code that lives in here.
routers/.* @gusted
# Let new strings be checked by the translation team.
options/locale/locale_en-US.ini @0ko
# Personal interest
.*/webhook.* @oliverpool

7
CONTRIBUTING.md Normal file

@ -0,0 +1,7 @@
# Forgejo Contributor Guide
The Forgejo project is run by a community of people who are expected to follow this guide when cooperating on a simple bug fix as well as when changing the governance. For more information about the project, take a look at [the documentation explaining what Forgejo provides](README.md).
Sensitive security-related issues should be reported to [security@forgejo.org](mailto:security@forgejo.org) using [encryption](https://keyoxide.org/security@forgejo.org).
You can find links to the different aspects of Developer documentation on this page: [Forgejo Contributor Guide](https://forgejo.org/docs/next/contributor/).

34
DCO Normal file

@ -0,0 +1,34 @@
Developer Certificate of Origin
Version 1.1
Copyright (C) 2004, 2006 The Linux Foundation and its contributors.
Everyone is permitted to copy and distribute verbatim copies of this
license document, but changing it is not allowed.
Developer's Certificate of Origin 1.1
By making a contribution to this project, I certify that:
(a) The contribution was created in whole or in part by me and I
have the right to submit it under the open source license
indicated in the file; or
(b) The contribution is based upon previous work that, to the best
of my knowledge, is covered under an appropriate open source
license and I have the right under that license to submit that
work with modifications, whether created in whole or in part
by me, under the same open source license (unless I am
permitted to submit under a different license), as indicated
in the file; or
(c) The contribution was provided directly to me by some other
person who certified (a), (b) or (c) and I have not modified
it.
(d) I understand and agree that this project and the contribution
are public and that a record of the contribution (including all
personal information I submit with it, including my sign-off) is
maintained indefinitely and may be redistributed consistent with
this project or the open source license(s) involved.

108
Dockerfile Normal file

@ -0,0 +1,108 @@
FROM --platform=$BUILDPLATFORM code.forgejo.org/oci/xx AS xx
FROM --platform=$BUILDPLATFORM code.forgejo.org/oci/golang:1.23-alpine3.20 as build-env
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-direct}
ARG RELEASE_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS="bindata timetzdata $TAGS"
ARG CGO_EXTRA_CFLAGS
#
# Transparently cross compile for the target platform
#
COPY --from=xx / /
ARG TARGETPLATFORM
RUN apk --no-cache add clang lld
RUN xx-apk --no-cache add gcc musl-dev
ENV CGO_ENABLED=1
RUN xx-go --wrap
#
# for go generate and binfmt to find
# without it the generate phase will fail with
# #19 25.04 modules/public/public_bindata.go:8: running "go": exit status 1
# #19 25.39 aarch64-binfmt-P: Could not open '/lib/ld-musl-aarch64.so.1': No such file or directory
# why exactly is it needed? where is binfmt involved?
#
RUN cp /*-alpine-linux-musl*/lib/ld-musl-*.so.1 /lib || true
RUN apk --no-cache add build-base git nodejs npm
COPY . ${GOPATH}/src/code.gitea.io/gitea
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
RUN make clean
RUN make frontend
RUN go build contrib/environment-to-ini/environment-to-ini.go && xx-verify environment-to-ini
RUN LDFLAGS="-buildid=" make RELEASE_VERSION=$RELEASE_VERSION GOFLAGS="-trimpath" go-check generate-backend static-executable && xx-verify gitea
# Copy local files
COPY docker/root /tmp/local
# Set permissions
RUN chmod 755 /tmp/local/usr/bin/entrypoint \
/tmp/local/usr/local/bin/gitea \
/tmp/local/etc/s6/gitea/* \
/tmp/local/etc/s6/openssh/* \
/tmp/local/etc/s6/.s6-svscan/* \
/go/src/code.gitea.io/gitea/gitea \
/go/src/code.gitea.io/gitea/environment-to-ini
RUN chmod 644 /go/src/code.gitea.io/gitea/contrib/autocompletion/bash_autocomplete
FROM code.forgejo.org/oci/alpine:3.20
ARG RELEASE_VERSION
LABEL maintainer="contact@forgejo.org" \
org.opencontainers.image.authors="Forgejo" \
org.opencontainers.image.url="https://forgejo.org" \
org.opencontainers.image.documentation="https://forgejo.org/download/#container-image" \
org.opencontainers.image.source="https://codeberg.org/forgejo/forgejo" \
org.opencontainers.image.version="${RELEASE_VERSION}" \
org.opencontainers.image.vendor="Forgejo" \
org.opencontainers.image.licenses="GPL-3.0-or-later" \
org.opencontainers.image.title="Forgejo. Beyond coding. We forge." \
org.opencontainers.image.description="Forgejo is a self-hosted lightweight software forge. Easy to install and low maintenance, it just does the job."
EXPOSE 22 3000
RUN apk --no-cache add \
bash \
ca-certificates \
curl \
gettext \
git \
linux-pam \
openssh \
s6 \
sqlite \
su-exec \
gnupg \
&& rm -rf /var/cache/apk/*
RUN addgroup \
-S -g 1000 \
git && \
adduser \
-S -H -D \
-h /data/git \
-s /bin/bash \
-u 1000 \
-G git \
git && \
echo "git:*" | chpasswd -e
ENV USER=git
ENV GITEA_CUSTOM=/data/gitea
VOLUME ["/data"]
ENTRYPOINT ["/usr/bin/entrypoint"]
CMD ["/bin/s6-svscan", "/etc/s6"]
COPY --from=build-env /tmp/local /
RUN cd /usr/local/bin ; ln -s gitea forgejo
COPY --from=build-env /go/src/code.gitea.io/gitea/gitea /app/gitea/gitea
RUN ln -s /app/gitea/gitea /app/gitea/forgejo-cli
COPY --from=build-env /go/src/code.gitea.io/gitea/environment-to-ini /usr/local/bin/environment-to-ini
COPY --from=build-env /go/src/code.gitea.io/gitea/contrib/autocompletion/bash_autocomplete /etc/profile.d/gitea_bash_autocomplete.sh

114
Dockerfile.rootless Normal file

@ -0,0 +1,114 @@
FROM --platform=$BUILDPLATFORM code.forgejo.org/oci/xx AS xx
FROM --platform=$BUILDPLATFORM code.forgejo.org/oci/golang:1.23-alpine3.20 as build-env
ARG GOPROXY
ENV GOPROXY=${GOPROXY:-direct}
ARG RELEASE_VERSION
ARG TAGS="sqlite sqlite_unlock_notify"
ENV TAGS="bindata timetzdata $TAGS"
ARG CGO_EXTRA_CFLAGS
#
# Transparently cross compile for the target platform
#
COPY --from=xx / /
ARG TARGETPLATFORM
RUN apk --no-cache add clang lld
RUN xx-apk --no-cache add gcc musl-dev
ENV CGO_ENABLED=1
RUN xx-go --wrap
#
# for go generate and binfmt to find
# without it the generate phase will fail with
# #19 25.04 modules/public/public_bindata.go:8: running "go": exit status 1
# #19 25.39 aarch64-binfmt-P: Could not open '/lib/ld-musl-aarch64.so.1': No such file or directory
# why exactly is it needed? where is binfmt involved?
#
RUN cp /*-alpine-linux-musl*/lib/ld-musl-*.so.1 /lib || true
RUN apk --no-cache add build-base git nodejs npm
COPY . ${GOPATH}/src/code.gitea.io/gitea
WORKDIR ${GOPATH}/src/code.gitea.io/gitea
RUN make clean
RUN make frontend
RUN go build contrib/environment-to-ini/environment-to-ini.go && xx-verify environment-to-ini
RUN make RELEASE_VERSION=$RELEASE_VERSION go-check generate-backend static-executable && xx-verify gitea
# Copy local files
COPY docker/rootless /tmp/local
# Set permissions
RUN chmod 755 /tmp/local/usr/local/bin/docker-entrypoint.sh \
/tmp/local/usr/local/bin/docker-setup.sh \
/tmp/local/usr/local/bin/gitea \
/go/src/code.gitea.io/gitea/gitea \
/go/src/code.gitea.io/gitea/environment-to-ini
RUN chmod 644 /go/src/code.gitea.io/gitea/contrib/autocompletion/bash_autocomplete
FROM code.forgejo.org/oci/alpine:3.20
LABEL maintainer="contact@forgejo.org" \
org.opencontainers.image.authors="Forgejo" \
org.opencontainers.image.url="https://forgejo.org" \
org.opencontainers.image.documentation="https://forgejo.org/download/#container-image" \
org.opencontainers.image.source="https://codeberg.org/forgejo/forgejo" \
org.opencontainers.image.version="${RELEASE_VERSION}" \
org.opencontainers.image.vendor="Forgejo" \
org.opencontainers.image.licenses="GPL-3.0-or-later" \
org.opencontainers.image.title="Forgejo. Beyond coding. We forge." \
org.opencontainers.image.description="Forgejo is a self-hosted lightweight software forge. Easy to install and low maintenance, it just does the job."
EXPOSE 2222 3000
RUN apk --no-cache add \
bash \
ca-certificates \
dumb-init \
gettext \
git \
curl \
gnupg \
&& rm -rf /var/cache/apk/*
RUN addgroup \
-S -g 1000 \
git && \
adduser \
-S -H -D \
-h /var/lib/gitea/git \
-s /bin/bash \
-u 1000 \
-G git \
git
RUN mkdir -p /var/lib/gitea /etc/gitea
RUN chown git:git /var/lib/gitea /etc/gitea
COPY --from=build-env /tmp/local /
RUN cd /usr/local/bin ; ln -s gitea forgejo
COPY --from=build-env --chown=root:root /go/src/code.gitea.io/gitea/gitea /app/gitea/gitea
RUN ln -s /app/gitea/gitea /app/gitea/forgejo-cli
COPY --from=build-env --chown=root:root /go/src/code.gitea.io/gitea/environment-to-ini /usr/local/bin/environment-to-ini
COPY --from=build-env /go/src/code.gitea.io/gitea/contrib/autocompletion/bash_autocomplete /etc/profile.d/gitea_bash_autocomplete.sh
#git:git
USER 1000:1000
ENV GITEA_WORK_DIR=/var/lib/gitea
ENV GITEA_CUSTOM=/var/lib/gitea/custom
ENV GITEA_TEMP=/tmp/gitea
ENV TMPDIR=/tmp/gitea
# Legacy config file for backwards compatibility
# TODO: remove on next major version release
ENV GITEA_APP_INI_LEGACY=/etc/gitea/app.ini
ENV GITEA_APP_INI=${GITEA_CUSTOM}/conf/app.ini
ENV HOME="/var/lib/gitea/git"
VOLUME ["/var/lib/gitea", "/etc/gitea"]
WORKDIR /var/lib/gitea
ENTRYPOINT ["/usr/bin/dumb-init", "--", "/usr/local/bin/docker-entrypoint.sh"]
CMD []
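As a usage sketch (the image tag, volume names, and host ports below are arbitrary examples, not part of this repository), the rootless image defined above could be built and started in line with its EXPOSE and VOLUME declarations:

# Sketch: build and run the rootless image defined above.
docker build -f Dockerfile.rootless -t forgejo:rootless .
docker run -d \
  -p 3000:3000 -p 2222:2222 \
  -v forgejo-data:/var/lib/gitea \
  -v forgejo-config:/etc/gitea \
  forgejo:rootless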

674
LICENSE Normal file

@ -0,0 +1,674 @@
GNU GENERAL PUBLIC LICENSE
Version 3, 29 June 2007
Copyright (C) 2007 Free Software Foundation, Inc. <https://fsf.org/>
Everyone is permitted to copy and distribute verbatim copies
of this license document, but changing it is not allowed.
Preamble
The GNU General Public License is a free, copyleft license for
software and other kinds of works.
The licenses for most software and other practical works are designed
to take away your freedom to share and change the works. By contrast,
the GNU General Public License is intended to guarantee your freedom to
share and change all versions of a program--to make sure it remains free
software for all its users. We, the Free Software Foundation, use the
GNU General Public License for most of our software; it applies also to
any other work released this way by its authors. You can apply it to
your programs, too.
When we speak of free software, we are referring to freedom, not
price. Our General Public Licenses are designed to make sure that you
have the freedom to distribute copies of free software (and charge for
them if you wish), that you receive source code or can get it if you
want it, that you can change the software or use pieces of it in new
free programs, and that you know you can do these things.
To protect your rights, we need to prevent others from denying you
these rights or asking you to surrender the rights. Therefore, you have
certain responsibilities if you distribute copies of the software, or if
you modify it: responsibilities to respect the freedom of others.
For example, if you distribute copies of such a program, whether
gratis or for a fee, you must pass on to the recipients the same
freedoms that you received. You must make sure that they, too, receive
or can get the source code. And you must show them these terms so they
know their rights.
Developers that use the GNU GPL protect your rights with two steps:
(1) assert copyright on the software, and (2) offer you this License
giving you legal permission to copy, distribute and/or modify it.
For the developers' and authors' protection, the GPL clearly explains
that there is no warranty for this free software. For both users' and
authors' sake, the GPL requires that modified versions be marked as
changed, so that their problems will not be attributed erroneously to
authors of previous versions.
Some devices are designed to deny users access to install or run
modified versions of the software inside them, although the manufacturer
can do so. This is fundamentally incompatible with the aim of
protecting users' freedom to change the software. The systematic
pattern of such abuse occurs in the area of products for individuals to
use, which is precisely where it is most unacceptable. Therefore, we
have designed this version of the GPL to prohibit the practice for those
products. If such problems arise substantially in other domains, we
stand ready to extend this provision to those domains in future versions
of the GPL, as needed to protect the freedom of users.
Finally, every program is threatened constantly by software patents.
States should not allow patents to restrict development and use of
software on general-purpose computers, but in those that do, we wish to
avoid the special danger that patents applied to a free program could
make it effectively proprietary. To prevent this, the GPL assures that
patents cannot be used to render the program non-free.
The precise terms and conditions for copying, distribution and
modification follow.
TERMS AND CONDITIONS
0. Definitions.
"This License" refers to version 3 of the GNU General Public License.
"Copyright" also means copyright-like laws that apply to other kinds of
works, such as semiconductor masks.
"The Program" refers to any copyrightable work licensed under this
License. Each licensee is addressed as "you". "Licensees" and
"recipients" may be individuals or organizations.
To "modify" a work means to copy from or adapt all or part of the work
in a fashion requiring copyright permission, other than the making of an
exact copy. The resulting work is called a "modified version" of the
earlier work or a work "based on" the earlier work.
A "covered work" means either the unmodified Program or a work based
on the Program.
To "propagate" a work means to do anything with it that, without
permission, would make you directly or secondarily liable for
infringement under applicable copyright law, except executing it on a
computer or modifying a private copy. Propagation includes copying,
distribution (with or without modification), making available to the
public, and in some countries other activities as well.
To "convey" a work means any kind of propagation that enables other
parties to make or receive copies. Mere interaction with a user through
a computer network, with no transfer of a copy, is not conveying.
An interactive user interface displays "Appropriate Legal Notices"
to the extent that it includes a convenient and prominently visible
feature that (1) displays an appropriate copyright notice, and (2)
tells the user that there is no warranty for the work (except to the
extent that warranties are provided), that licensees may convey the
work under this License, and how to view a copy of this License. If
the interface presents a list of user commands or options, such as a
menu, a prominent item in the list meets this criterion.
1. Source Code.
The "source code" for a work means the preferred form of the work
for making modifications to it. "Object code" means any non-source
form of a work.
A "Standard Interface" means an interface that either is an official
standard defined by a recognized standards body, or, in the case of
interfaces specified for a particular programming language, one that
is widely used among developers working in that language.
The "System Libraries" of an executable work include anything, other
than the work as a whole, that (a) is included in the normal form of
packaging a Major Component, but which is not part of that Major
Component, and (b) serves only to enable use of the work with that
Major Component, or to implement a Standard Interface for which an
implementation is available to the public in source code form. A
"Major Component", in this context, means a major essential component
(kernel, window system, and so on) of the specific operating system
(if any) on which the executable work runs, or a compiler used to
produce the work, or an object code interpreter used to run it.
The "Corresponding Source" for a work in object code form means all
the source code needed to generate, install, and (for an executable
work) run the object code and to modify the work, including scripts to
control those activities. However, it does not include the work's
System Libraries, or general-purpose tools or generally available free
programs which are used unmodified in performing those activities but
which are not part of the work. For example, Corresponding Source
includes interface definition files associated with source files for
the work, and the source code for shared libraries and dynamically
linked subprograms that the work is specifically designed to require,
such as by intimate data communication or control flow between those
subprograms and other parts of the work.
The Corresponding Source need not include anything that users
can regenerate automatically from other parts of the Corresponding
Source.
The Corresponding Source for a work in source code form is that
same work.
2. Basic Permissions.
All rights granted under this License are granted for the term of
copyright on the Program, and are irrevocable provided the stated
conditions are met. This License explicitly affirms your unlimited
permission to run the unmodified Program. The output from running a
covered work is covered by this License only if the output, given its
content, constitutes a covered work. This License acknowledges your
rights of fair use or other equivalent, as provided by copyright law.
You may make, run and propagate covered works that you do not
convey, without conditions so long as your license otherwise remains
in force. You may convey covered works to others for the sole purpose
of having them make modifications exclusively for you, or provide you
with facilities for running those works, provided that you comply with
the terms of this License in conveying all material for which you do
not control copyright. Those thus making or running the covered works
for you must do so exclusively on your behalf, under your direction
and control, on terms that prohibit them from making any copies of
your copyrighted material outside their relationship with you.
Conveying under any other circumstances is permitted solely under
the conditions stated below. Sublicensing is not allowed; section 10
makes it unnecessary.
3. Protecting Users' Legal Rights From Anti-Circumvention Law.
No covered work shall be deemed part of an effective technological
measure under any applicable law fulfilling obligations under article
11 of the WIPO copyright treaty adopted on 20 December 1996, or
similar laws prohibiting or restricting circumvention of such
measures.
When you convey a covered work, you waive any legal power to forbid
circumvention of technological measures to the extent such circumvention
is effected by exercising rights under this License with respect to
the covered work, and you disclaim any intention to limit operation or
modification of the work as a means of enforcing, against the work's
users, your or third parties' legal rights to forbid circumvention of
technological measures.
4. Conveying Verbatim Copies.
You may convey verbatim copies of the Program's source code as you
receive it, in any medium, provided that you conspicuously and
appropriately publish on each copy an appropriate copyright notice;
keep intact all notices stating that this License and any
non-permissive terms added in accord with section 7 apply to the code;
keep intact all notices of the absence of any warranty; and give all
recipients a copy of this License along with the Program.
You may charge any price or no price for each copy that you convey,
and you may offer support or warranty protection for a fee.
5. Conveying Modified Source Versions.
You may convey a work based on the Program, or the modifications to
produce it from the Program, in the form of source code under the
terms of section 4, provided that you also meet all of these conditions:
a) The work must carry prominent notices stating that you modified
it, and giving a relevant date.
b) The work must carry prominent notices stating that it is
released under this License and any conditions added under section
7. This requirement modifies the requirement in section 4 to
"keep intact all notices".
c) You must license the entire work, as a whole, under this
License to anyone who comes into possession of a copy. This
License will therefore apply, along with any applicable section 7
additional terms, to the whole of the work, and all its parts,
regardless of how they are packaged. This License gives no
permission to license the work in any other way, but it does not
invalidate such permission if you have separately received it.
d) If the work has interactive user interfaces, each must display
Appropriate Legal Notices; however, if the Program has interactive
interfaces that do not display Appropriate Legal Notices, your
work need not make them do so.
A compilation of a covered work with other separate and independent
works, which are not by their nature extensions of the covered work,
and which are not combined with it such as to form a larger program,
in or on a volume of a storage or distribution medium, is called an
"aggregate" if the compilation and its resulting copyright are not
used to limit the access or legal rights of the compilation's users
beyond what the individual works permit. Inclusion of a covered work
in an aggregate does not cause this License to apply to the other
parts of the aggregate.
6. Conveying Non-Source Forms.
You may convey a covered work in object code form under the terms
of sections 4 and 5, provided that you also convey the
machine-readable Corresponding Source under the terms of this License,
in one of these ways:
a) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by the
Corresponding Source fixed on a durable physical medium
customarily used for software interchange.
b) Convey the object code in, or embodied in, a physical product
(including a physical distribution medium), accompanied by a
written offer, valid for at least three years and valid for as
long as you offer spare parts or customer support for that product
model, to give anyone who possesses the object code either (1) a
copy of the Corresponding Source for all the software in the
product that is covered by this License, on a durable physical
medium customarily used for software interchange, for a price no
more than your reasonable cost of physically performing this
conveying of source, or (2) access to copy the
Corresponding Source from a network server at no charge.
c) Convey individual copies of the object code with a copy of the
written offer to provide the Corresponding Source. This
alternative is allowed only occasionally and noncommercially, and
only if you received the object code with such an offer, in accord
with subsection 6b.
d) Convey the object code by offering access from a designated
place (gratis or for a charge), and offer equivalent access to the
Corresponding Source in the same way through the same place at no
further charge. You need not require recipients to copy the
Corresponding Source along with the object code. If the place to
copy the object code is a network server, the Corresponding Source
may be on a different server (operated by you or a third party)
that supports equivalent copying facilities, provided you maintain
clear directions next to the object code saying where to find the
Corresponding Source. Regardless of what server hosts the
Corresponding Source, you remain obligated to ensure that it is
available for as long as needed to satisfy these requirements.
e) Convey the object code using peer-to-peer transmission, provided
you inform other peers where the object code and Corresponding
Source of the work are being offered to the general public at no
charge under subsection 6d.
A separable portion of the object code, whose source code is excluded
from the Corresponding Source as a System Library, need not be
included in conveying the object code work.
A "User Product" is either (1) a "consumer product", which means any
tangible personal property which is normally used for personal, family,
or household purposes, or (2) anything designed or sold for incorporation
into a dwelling. In determining whether a product is a consumer product,
doubtful cases shall be resolved in favor of coverage. For a particular
product received by a particular user, "normally used" refers to a
typical or common use of that class of product, regardless of the status
of the particular user or of the way in which the particular user
actually uses, or expects or is expected to use, the product. A product
is a consumer product regardless of whether the product has substantial
commercial, industrial or non-consumer uses, unless such uses represent
the only significant mode of use of the product.
"Installation Information" for a User Product means any methods,
procedures, authorization keys, or other information required to install
and execute modified versions of a covered work in that User Product from
a modified version of its Corresponding Source. The information must
suffice to ensure that the continued functioning of the modified object
code is in no case prevented or interfered with solely because
modification has been made.
If you convey an object code work under this section in, or with, or
specifically for use in, a User Product, and the conveying occurs as
part of a transaction in which the right of possession and use of the
User Product is transferred to the recipient in perpetuity or for a
fixed term (regardless of how the transaction is characterized), the
Corresponding Source conveyed under this section must be accompanied
by the Installation Information. But this requirement does not apply
if neither you nor any third party retains the ability to install
modified object code on the User Product (for example, the work has
been installed in ROM).
The requirement to provide Installation Information does not include a
requirement to continue to provide support service, warranty, or updates
for a work that has been modified or installed by the recipient, or for
the User Product in which it has been modified or installed. Access to a
network may be denied when the modification itself materially and
adversely affects the operation of the network or violates the rules and
protocols for communication across the network.
Corresponding Source conveyed, and Installation Information provided,
in accord with this section must be in a format that is publicly
documented (and with an implementation available to the public in
source code form), and must require no special password or key for
unpacking, reading or copying.
7. Additional Terms.
"Additional permissions" are terms that supplement the terms of this
License by making exceptions from one or more of its conditions.
Additional permissions that are applicable to the entire Program shall
be treated as though they were included in this License, to the extent
that they are valid under applicable law. If additional permissions
apply only to part of the Program, that part may be used separately
under those permissions, but the entire Program remains governed by
this License without regard to the additional permissions.
When you convey a copy of a covered work, you may at your option
remove any additional permissions from that copy, or from any part of
it. (Additional permissions may be written to require their own
removal in certain cases when you modify the work.) You may place
additional permissions on material, added by you to a covered work,
for which you have or can give appropriate copyright permission.
Notwithstanding any other provision of this License, for material you
add to a covered work, you may (if authorized by the copyright holders of
that material) supplement the terms of this License with terms:
a) Disclaiming warranty or limiting liability differently from the
terms of sections 15 and 16 of this License; or
b) Requiring preservation of specified reasonable legal notices or
author attributions in that material or in the Appropriate Legal
Notices displayed by works containing it; or
c) Prohibiting misrepresentation of the origin of that material, or
requiring that modified versions of such material be marked in
reasonable ways as different from the original version; or
d) Limiting the use for publicity purposes of names of licensors or
authors of the material; or
e) Declining to grant rights under trademark law for use of some
trade names, trademarks, or service marks; or
f) Requiring indemnification of licensors and authors of that
material by anyone who conveys the material (or modified versions of
it) with contractual assumptions of liability to the recipient, for
any liability that these contractual assumptions directly impose on
those licensors and authors.
All other non-permissive additional terms are considered "further
restrictions" within the meaning of section 10. If the Program as you
received it, or any part of it, contains a notice stating that it is
governed by this License along with a term that is a further
restriction, you may remove that term. If a license document contains
a further restriction but permits relicensing or conveying under this
License, you may add to a covered work material governed by the terms
of that license document, provided that the further restriction does
not survive such relicensing or conveying.
If you add terms to a covered work in accord with this section, you
must place, in the relevant source files, a statement of the
additional terms that apply to those files, or a notice indicating
where to find the applicable terms.
Additional terms, permissive or non-permissive, may be stated in the
form of a separately written license, or stated as exceptions;
the above requirements apply either way.
8. Termination.
You may not propagate or modify a covered work except as expressly
provided under this License. Any attempt otherwise to propagate or
modify it is void, and will automatically terminate your rights under
this License (including any patent licenses granted under the third
paragraph of section 11).
However, if you cease all violation of this License, then your
license from a particular copyright holder is reinstated (a)
provisionally, unless and until the copyright holder explicitly and
finally terminates your license, and (b) permanently, if the copyright
holder fails to notify you of the violation by some reasonable means
prior to 60 days after the cessation.
Moreover, your license from a particular copyright holder is
reinstated permanently if the copyright holder notifies you of the
violation by some reasonable means, this is the first time you have
received notice of violation of this License (for any work) from that
copyright holder, and you cure the violation prior to 30 days after
your receipt of the notice.
Termination of your rights under this section does not terminate the
licenses of parties who have received copies or rights from you under
this License. If your rights have been terminated and not permanently
reinstated, you do not qualify to receive new licenses for the same
material under section 10.
9. Acceptance Not Required for Having Copies.
You are not required to accept this License in order to receive or
run a copy of the Program. Ancillary propagation of a covered work
occurring solely as a consequence of using peer-to-peer transmission
to receive a copy likewise does not require acceptance. However,
nothing other than this License grants you permission to propagate or
modify any covered work. These actions infringe copyright if you do
not accept this License. Therefore, by modifying or propagating a
covered work, you indicate your acceptance of this License to do so.
10. Automatic Licensing of Downstream Recipients.
Each time you convey a covered work, the recipient automatically
receives a license from the original licensors, to run, modify and
propagate that work, subject to this License. You are not responsible
for enforcing compliance by third parties with this License.
An "entity transaction" is a transaction transferring control of an
organization, or substantially all assets of one, or subdividing an
organization, or merging organizations. If propagation of a covered
work results from an entity transaction, each party to that
transaction who receives a copy of the work also receives whatever
licenses to the work the party's predecessor in interest had or could
give under the previous paragraph, plus a right to possession of the
Corresponding Source of the work from the predecessor in interest, if
the predecessor has it or can get it with reasonable efforts.
You may not impose any further restrictions on the exercise of the
rights granted or affirmed under this License. For example, you may
not impose a license fee, royalty, or other charge for exercise of
rights granted under this License, and you may not initiate litigation
(including a cross-claim or counterclaim in a lawsuit) alleging that
any patent claim is infringed by making, using, selling, offering for
sale, or importing the Program or any portion of it.
11. Patents.
A "contributor" is a copyright holder who authorizes use under this
License of the Program or a work on which the Program is based. The
work thus licensed is called the contributor's "contributor version".
A contributor's "essential patent claims" are all patent claims
owned or controlled by the contributor, whether already acquired or
hereafter acquired, that would be infringed by some manner, permitted
by this License, of making, using, or selling its contributor version,
but do not include claims that would be infringed only as a
consequence of further modification of the contributor version. For
purposes of this definition, "control" includes the right to grant
patent sublicenses in a manner consistent with the requirements of
this License.
Each contributor grants you a non-exclusive, worldwide, royalty-free
patent license under the contributor's essential patent claims, to
make, use, sell, offer for sale, import and otherwise run, modify and
propagate the contents of its contributor version.
In the following three paragraphs, a "patent license" is any express
agreement or commitment, however denominated, not to enforce a patent
(such as an express permission to practice a patent or covenant not to
sue for patent infringement). To "grant" such a patent license to a
party means to make such an agreement or commitment not to enforce a
patent against the party.
If you convey a covered work, knowingly relying on a patent license,
and the Corresponding Source of the work is not available for anyone
to copy, free of charge and under the terms of this License, through a
publicly available network server or other readily accessible means,
then you must either (1) cause the Corresponding Source to be so
available, or (2) arrange to deprive yourself of the benefit of the
patent license for this particular work, or (3) arrange, in a manner
consistent with the requirements of this License, to extend the patent
license to downstream recipients. "Knowingly relying" means you have
actual knowledge that, but for the patent license, your conveying the
covered work in a country, or your recipient's use of the covered work
in a country, would infringe one or more identifiable patents in that
country that you have reason to believe are valid.
If, pursuant to or in connection with a single transaction or
arrangement, you convey, or propagate by procuring conveyance of, a
covered work, and grant a patent license to some of the parties
receiving the covered work authorizing them to use, propagate, modify
or convey a specific copy of the covered work, then the patent license
you grant is automatically extended to all recipients of the covered
work and works based on it.
A patent license is "discriminatory" if it does not include within
the scope of its coverage, prohibits the exercise of, or is
conditioned on the non-exercise of one or more of the rights that are
specifically granted under this License. You may not convey a covered
work if you are a party to an arrangement with a third party that is
in the business of distributing software, under which you make payment
to the third party based on the extent of your activity of conveying
the work, and under which the third party grants, to any of the
parties who would receive the covered work from you, a discriminatory
patent license (a) in connection with copies of the covered work
conveyed by you (or copies made from those copies), or (b) primarily
for and in connection with specific products or compilations that
contain the covered work, unless you entered into that arrangement,
or that patent license was granted, prior to 28 March 2007.
Nothing in this License shall be construed as excluding or limiting
any implied license or other defenses to infringement that may
otherwise be available to you under applicable patent law.
12. No Surrender of Others' Freedom.
If conditions are imposed on you (whether by court order, agreement or
otherwise) that contradict the conditions of this License, they do not
excuse you from the conditions of this License. If you cannot convey a
covered work so as to satisfy simultaneously your obligations under this
License and any other pertinent obligations, then as a consequence you may
not convey it at all. For example, if you agree to terms that obligate you
to collect a royalty for further conveying from those to whom you convey
the Program, the only way you could satisfy both those terms and this
License would be to refrain entirely from conveying the Program.
13. Use with the GNU Affero General Public License.
Notwithstanding any other provision of this License, you have
permission to link or combine any covered work with a work licensed
under version 3 of the GNU Affero General Public License into a single
combined work, and to convey the resulting work. The terms of this
License will continue to apply to the part which is the covered work,
but the special requirements of the GNU Affero General Public License,
section 13, concerning interaction through a network will apply to the
combination as such.
14. Revised Versions of this License.
The Free Software Foundation may publish revised and/or new versions of
the GNU General Public License from time to time. Such new versions will
be similar in spirit to the present version, but may differ in detail to
address new problems or concerns.
Each version is given a distinguishing version number. If the
Program specifies that a certain numbered version of the GNU General
Public License "or any later version" applies to it, you have the
option of following the terms and conditions either of that numbered
version or of any later version published by the Free Software
Foundation. If the Program does not specify a version number of the
GNU General Public License, you may choose any version ever published
by the Free Software Foundation.
If the Program specifies that a proxy can decide which future
versions of the GNU General Public License can be used, that proxy's
public statement of acceptance of a version permanently authorizes you
to choose that version for the Program.
Later license versions may give you additional or different
permissions. However, no additional obligations are imposed on any
author or copyright holder as a result of your choosing to follow a
later version.
15. Disclaimer of Warranty.
THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY
APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT
HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY
OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO,
THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM
IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF
ALL NECESSARY SERVICING, REPAIR OR CORRECTION.
16. Limitation of Liability.
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING
WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MODIFIES AND/OR CONVEYS
THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY
GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE
USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF
DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD
PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS),
EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF
SUCH DAMAGES.
17. Interpretation of Sections 15 and 16.
If the disclaimer of warranty and limitation of liability provided
above cannot be given local legal effect according to their terms,
reviewing courts shall apply local law that most closely approximates
an absolute waiver of all civil liability in connection with the
Program, unless a warranty or assumption of liability accompanies a
copy of the Program in return for a fee.
END OF TERMS AND CONDITIONS
How to Apply These Terms to Your New Programs
If you develop a new program, and you want it to be of the greatest
possible use to the public, the best way to achieve this is to make it
free software which everyone can redistribute and change under these terms.
To do so, attach the following notices to the program. It is safest
to attach them to the start of each source file to most effectively
state the exclusion of warranty; and each file should have at least
the "copyright" line and a pointer to where the full notice is found.
<one line to give the program's name and a brief idea of what it does.>
Copyright (C) <year> <name of author>
This program is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
This program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program. If not, see <https://www.gnu.org/licenses/>.
Also add information on how to contact you by electronic and paper mail.
If the program does terminal interaction, make it output a short
notice like this when it starts in an interactive mode:
<program> Copyright (C) <year> <name of author>
This program comes with ABSOLUTELY NO WARRANTY; for details type `show w'.
This is free software, and you are welcome to redistribute it
under certain conditions; type `show c' for details.
The hypothetical commands `show w' and `show c' should show the appropriate
parts of the General Public License. Of course, your program's commands
might be different; for a GUI interface, you would use an "about box".
You should also get your employer (if you work as a programmer) or school,
if any, to sign a "copyright disclaimer" for the program, if necessary.
For more information on this, and how to apply and follow the GNU GPL, see
<https://www.gnu.org/licenses/>.
The GNU General Public License does not permit incorporating your program
into proprietary programs. If your program is a subroutine library, you
may consider it more useful to permit linking proprietary applications with
the library. If this is what you want to do, use the GNU Lesser General
Public License instead of this License. But first, please read
<https://www.gnu.org/licenses/why-not-lgpl.html>.

1051
Makefile Normal file

File diff suppressed because it is too large

51
README.md Normal file
View file

@ -0,0 +1,51 @@
<div align="center">
<img src="./assets/logo.svg" alt="" width="192" align="center" />
<h1 align="center">Welcome to Forgejo</h1>
</div>
Hi there! Tired of big platforms playing monopoly?
Providing Git hosting for your project, friends, company or community?
**Forgejo** (/for'd͡ʒe.jo/, inspired by forĝejo, the Esperanto word for *forge*) has you covered with its intuitive interface,
light and easy hosting and a lot of built-in functionality.
Forgejo was [created in 2022](https://forgejo.org/2022-12-15-hello-forgejo/)
because we think that the project should be owned by an independent community.
If you second that, then Forgejo is for you!
Our promise: **Independent Free/Libre Software forever!**
## What does Forgejo offer?
<!-- If you want to know what Forgejo is like,
you can check out public instances,
e.g. [Codeberg.org](https://codeberg.org).
-->
If you like any of the following, Forgejo is literally meant for you:
- Lightweight: Forgejo can easily be hosted on nearly **every machine**.
Running on a Raspberry? Small cloud instance? No problem!
- Project management: Besides Git hosting, Forgejo offers issues,
pull requests, wikis, kanban boards and much more to **coordinate with your team**.
- Publishing: Have something to share? Use **releases** to host your software for download,
or use the **package registry** to publish it for docker, npm and many other package managers.
- Customizable: Want to change your look? Change some settings?
There are many **config switches** to make Forgejo work exactly like you want.
- Powerful: Organizations & team permissions, CI integration, Code Search, LDAP, OAuth and much more.
If you have **advanced needs**, Forgejo has you covered.
- Privacy: From update checker to default settings: Forgejo is built to be **privacy first** for you and your crew.
- Federation: (WIP) We are actively working to connect software forges with each other through **ActivityPub**,
and create a collaborative network of personal instances.
## Learn more
Dive into the [documentation](https://forgejo.org/docs/latest/), subscribe to releases and blog posts on [our website](https://forgejo.org), <a href="https://floss.social/@forgejo" rel="me">find us on the Fediverse</a> or hop into [our Matrix room](https://matrix.to/#/#forgejo-chat:matrix.org) if you have any questions or want to get involved.
## License
Forgejo is distributed under the terms of the [GPL version 3.0](LICENSE) or any later version.
The agreement for this license [was documented in June 2023](https://codeberg.org/forgejo/governance/pulls/24) and implemented during the development of Forgejo v9.0. All Forgejo versions before v9.0 are distributed under the MIT license.
## Get involved
If you are interested in making Forgejo better, either by reporting a bug or by changing the governance, please [take a look at the contribution guide](CONTRIBUTING.md).

2678
RELEASE-NOTES.md Normal file

File diff suppressed because it is too large

1
assets/emoji.json generated Normal file

File diff suppressed because one or more lines are too long

27
assets/favicon.svg Normal file
View file

@ -0,0 +1,27 @@
<svg viewBox="0 0 212 212" xmlns="http://www.w3.org/2000/svg">
<style type="text/css">
circle {
fill: none;
stroke: #000;
stroke-width: 15;
}
path {
fill: none;
stroke: #000;
stroke-width: 25;
}
.orange {
stroke:#ff6600;
}
.red {
stroke:#d40000;
}
</style>
<g transform="translate(6,6)">
<path d="M58 168 v-98 a50 50 0 0 1 50-50 h20" class="orange" />
<path d="M58 168 v-30 a50 50 0 0 1 50-50 h20" class="red" />
<circle cx="142" cy="20" r="18" class="orange" />
<circle cx="142" cy="88" r="18" class="red" />
<circle cx="58" cy="180" r="18" class="red" />
</g>
</svg>


1102
assets/go-licenses.json generated Normal file

File diff suppressed because one or more lines are too long

27
assets/logo.svg Normal file
View file

@ -0,0 +1,27 @@
<svg viewBox="0 0 212 212" xmlns="http://www.w3.org/2000/svg">
<style type="text/css">
circle {
fill: none;
stroke: #000;
stroke-width: 15;
}
path {
fill: none;
stroke: #000;
stroke-width: 25;
}
.orange {
stroke:#ff6600;
}
.red {
stroke:#d40000;
}
</style>
<g transform="translate(6,6)">
<path d="M58 168 v-98 a50 50 0 0 1 50-50 h20" class="orange" />
<path d="M58 168 v-30 a50 50 0 0 1 50-50 h20" class="red" />
<circle cx="142" cy="20" r="18" class="orange" />
<circle cx="142" cy="88" r="18" class="red" />
<circle cx="58" cy="180" r="18" class="red" />
</g>
</svg>


14
build.go Normal file
View file

@ -0,0 +1,14 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build vendor
package main
// Libraries that are included to vendor utilities used during build.
// These libraries will not be included in a normal compilation.
import (
// for embed
_ "github.com/shurcooL/vfsgen"
)

115
build/backport-locales.go Normal file
View file

@ -0,0 +1,115 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"fmt"
"os"
"os/exec"
"path/filepath"
"strings"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/setting"
)
func main() {
if len(os.Args) != 2 {
fmt.Println("usage: backport-locales <to-ref>")
fmt.Println("eg: backport-locales release/v1.19")
os.Exit(1)
}
mustNoErr := func(err error) {
if err != nil {
panic(err)
}
}
collectInis := func(ref string) map[string]setting.ConfigProvider {
inis := map[string]setting.ConfigProvider{}
err := filepath.WalkDir("options/locale", func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
if d.IsDir() || !strings.HasSuffix(d.Name(), ".ini") {
return nil
}
cfg, err := setting.NewConfigProviderForLocale(path)
mustNoErr(err)
inis[path] = cfg
fmt.Printf("collecting: %s @ %s\n", path, ref)
return nil
})
mustNoErr(err)
return inis
}
// collect new locales from current working directory
inisNew := collectInis("HEAD")
// switch to the target ref, and collect the old locales
cmd := exec.Command("git", "checkout", os.Args[1])
cmd.Stdout = os.Stdout
cmd.Stderr = os.Stderr
mustNoErr(cmd.Run())
inisOld := collectInis(os.Args[1])
// use old en-US as the base, and copy the new translations to the old locales
enUsOld := inisOld["options/locale/locale_en-US.ini"]
brokenWarned := make(container.Set[string])
for path, iniOld := range inisOld {
if iniOld == enUsOld {
continue
}
iniNew := inisNew[path]
if iniNew == nil {
continue
}
for _, secEnUS := range enUsOld.Sections() {
secOld := iniOld.Section(secEnUS.Name())
secNew := iniNew.Section(secEnUS.Name())
for _, keyEnUs := range secEnUS.Keys() {
if secNew.HasKey(keyEnUs.Name()) {
oldStr := secOld.Key(keyEnUs.Name()).String()
newStr := secNew.Key(keyEnUs.Name()).String()
broken := oldStr != "" && strings.Count(oldStr, "%") != strings.Count(newStr, "%")
broken = broken || strings.Contains(oldStr, "\n") || strings.Contains(newStr, "\n")
if broken {
brokenWarned.Add(secOld.Name() + "." + keyEnUs.Name())
fmt.Println("----")
fmt.Printf("WARNING: skip broken locale: %s , [%s] %s\n", path, secEnUS.Name(), keyEnUs.Name())
fmt.Printf("\told: %s\n", strings.ReplaceAll(oldStr, "\n", "\\n"))
fmt.Printf("\tnew: %s\n", strings.ReplaceAll(newStr, "\n", "\\n"))
continue
}
secOld.Key(keyEnUs.Name()).SetValue(newStr)
}
}
}
mustNoErr(iniOld.SaveTo(path))
}
fmt.Println("========")
for path, iniNew := range inisNew {
for _, sec := range iniNew.Sections() {
for _, key := range sec.Keys() {
str := sec.Key(key.Name()).String()
broken := strings.Contains(str, "\n")
broken = broken || strings.HasPrefix(str, "`") != strings.HasSuffix(str, "`")
broken = broken || strings.HasPrefix(str, "\"`")
broken = broken || strings.HasPrefix(str, "`\"")
broken = broken || strings.Count(str, `"`)%2 == 1
broken = broken || strings.Count(str, "`")%2 == 1
if broken && !brokenWarned.Contains(sec.Name()+"."+key.Name()) {
fmt.Printf("WARNING: found broken locale: %s , [%s] %s\n", path, sec.Name(), key.Name())
fmt.Printf("\tstr: %s\n", strings.ReplaceAll(str, "\n", "\\n"))
fmt.Println("----")
}
}
}
}
}
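A minimal usage sketch for the backporter above, assuming the usual `go run` workflow for `//go:build ignore` helpers and reusing the example ref from its own usage message:
    # run from the repository root; release/v1.19 is the example ref from the usage text
    go run build/backport-locales.go release/v1.19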

281
build/code-batch-process.go Normal file
View file

@ -0,0 +1,281 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"fmt"
"log"
"os"
"os/exec"
"path/filepath"
"regexp"
"strconv"
"strings"
"code.gitea.io/gitea/build/codeformat"
)
// Windows has a limitation for command line arguments: the size cannot exceed 32KB.
// So we have to feed the files to some tools (like gofmt) batch by batch
// We also introduce a `gitea-fmt` command, it does better import formatting than gofmt/goimports. `gitea-fmt` calls `gofmt` internally.
var optionLogVerbose bool
func logVerbose(msg string, args ...any) {
if optionLogVerbose {
log.Printf(msg, args...)
}
}
func passThroughCmd(cmd string, args []string) error {
foundCmd, err := exec.LookPath(cmd)
if err != nil {
log.Fatalf("can not find cmd: %s", cmd)
}
c := exec.Cmd{
Path: foundCmd,
Args: append([]string{cmd}, args...),
Stdin: os.Stdin,
Stdout: os.Stdout,
Stderr: os.Stderr,
}
return c.Run()
}
type fileCollector struct {
dirs []string
includePatterns []*regexp.Regexp
excludePatterns []*regexp.Regexp
batchSize int
}
func newFileCollector(fileFilter string, batchSize int) (*fileCollector, error) {
co := &fileCollector{batchSize: batchSize}
if fileFilter == "go-own" {
co.dirs = []string{
"build",
"cmd",
"contrib",
"tests",
"models",
"modules",
"routers",
"services",
}
co.includePatterns = append(co.includePatterns, regexp.MustCompile(`.*\.go$`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`.*\bbindata\.go$`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`\.pb\.go$`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/gitea-repositories-meta`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`tests/integration/migration-test`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`modules/git/tests`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`models/fixtures`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`models/migrations/fixtures`))
co.excludePatterns = append(co.excludePatterns, regexp.MustCompile(`services/gitdiff/testdata`))
}
if co.dirs == nil {
return nil, fmt.Errorf("unknown file-filter: %s", fileFilter)
}
return co, nil
}
func (fc *fileCollector) matchPatterns(path string, regexps []*regexp.Regexp) bool {
path = strings.ReplaceAll(path, "\\", "/")
for _, re := range regexps {
if re.MatchString(path) {
return true
}
}
return false
}
func (fc *fileCollector) collectFiles() (res [][]string, err error) {
var batch []string
for _, dir := range fc.dirs {
err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
include := len(fc.includePatterns) == 0 || fc.matchPatterns(path, fc.includePatterns)
exclude := fc.matchPatterns(path, fc.excludePatterns)
process := include && !exclude
if !process {
if d.IsDir() {
if exclude {
logVerbose("exclude dir %s", path)
return filepath.SkipDir
}
// for a directory, if it is not excluded explicitly, we should walk into it
return nil
}
// for a file, we skip it if it shouldn't be processed
logVerbose("skip process %s", path)
return nil
}
if d.IsDir() {
// skip dir, we don't add dirs to the file list now
return nil
}
if len(batch) >= fc.batchSize {
res = append(res, batch)
batch = nil
}
batch = append(batch, path)
return nil
})
if err != nil {
return nil, err
}
}
res = append(res, batch)
return res, nil
}
// substArgFiles expands the {file-list} to a real file list for commands
func substArgFiles(args, files []string) []string {
for i, s := range args {
if s == "{file-list}" {
newArgs := append(args[:i], files...)
newArgs = append(newArgs, args[i+1:]...)
return newArgs
}
}
return args
}
func exitWithCmdErrors(subCmd string, subArgs []string, cmdErrors []error) {
for _, err := range cmdErrors {
if err != nil {
if exitError, ok := err.(*exec.ExitError); ok {
exitCode := exitError.ExitCode()
log.Printf("run command failed (code=%d): %s %v", exitCode, subCmd, subArgs)
os.Exit(exitCode)
} else {
log.Fatalf("run command failed (err=%s) %s %v", err, subCmd, subArgs)
}
}
}
}
func parseArgs() (mainOptions map[string]string, subCmd string, subArgs []string) {
mainOptions = map[string]string{}
for i := 1; i < len(os.Args); i++ {
arg := os.Args[i]
if arg == "" {
break
}
if arg[0] == '-' {
arg = strings.TrimPrefix(arg, "-")
arg = strings.TrimPrefix(arg, "-")
fields := strings.SplitN(arg, "=", 2)
if len(fields) == 1 {
mainOptions[fields[0]] = "1"
} else {
mainOptions[fields[0]] = fields[1]
}
} else {
subCmd = arg
subArgs = os.Args[i+1:]
break
}
}
return
}
func showUsage() {
fmt.Printf(`Usage: %[1]s [options] {command} [arguments]
Options:
--verbose
--file-filter=go-own
--batch-size=100
Commands:
%[1]s gofmt ...
Arguments:
{file-list} the file list
Example:
%[1]s gofmt -s -d {file-list}
`, "file-batch-exec")
}
func newFileCollectorFromMainOptions(mainOptions map[string]string) (fc *fileCollector, err error) {
fileFilter := mainOptions["file-filter"]
if fileFilter == "" {
fileFilter = "go-own"
}
batchSize, _ := strconv.Atoi(mainOptions["batch-size"])
if batchSize == 0 {
batchSize = 100
}
return newFileCollector(fileFilter, batchSize)
}
func containsString(a []string, s string) bool {
for _, v := range a {
if v == s {
return true
}
}
return false
}
func giteaFormatGoImports(files []string, doWriteFile bool) error {
for _, file := range files {
if err := codeformat.FormatGoImports(file, doWriteFile); err != nil {
log.Printf("failed to format go imports: %s, err=%v", file, err)
return err
}
}
return nil
}
func main() {
mainOptions, subCmd, subArgs := parseArgs()
if subCmd == "" {
showUsage()
os.Exit(1)
}
optionLogVerbose = mainOptions["verbose"] != ""
fc, err := newFileCollectorFromMainOptions(mainOptions)
if err != nil {
log.Fatalf("can not create file collector: %s", err.Error())
}
fileBatches, err := fc.collectFiles()
if err != nil {
log.Fatalf("can not collect files: %s", err.Error())
}
processed := 0
var cmdErrors []error
for _, files := range fileBatches {
if len(files) == 0 {
break
}
substArgs := substArgFiles(subArgs, files)
logVerbose("batch cmd: %s %v", subCmd, substArgs)
switch subCmd {
case "gitea-fmt":
if containsString(subArgs, "-d") {
log.Print("the -d option is not supported by gitea-fmt")
}
cmdErrors = append(cmdErrors, giteaFormatGoImports(files, containsString(subArgs, "-w")))
cmdErrors = append(cmdErrors, passThroughCmd("gofmt", append([]string{"-w", "-r", "interface{} -> any"}, substArgs...)))
cmdErrors = append(cmdErrors, passThroughCmd("go", append([]string{"run", os.Getenv("GOFUMPT_PACKAGE"), "-extra"}, substArgs...)))
default:
log.Fatalf("unknown cmd: %s %v", subCmd, subArgs)
}
processed += len(files)
}
logVerbose("processed %d files", processed)
exitWithCmdErrors(subCmd, subArgs, cmdErrors)
}
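A usage sketch for the batch processor above, assuming it is invoked with `go run`; the arguments are taken from its own `showUsage` output, and `{file-list}` is expanded by the tool itself rather than by the shell:
    # show gofmt diffs for the project's own Go files, then apply gitea-fmt in place
    go run build/code-batch-process.go gofmt -s -d '{file-list}'
    go run build/code-batch-process.go gitea-fmt -w '{file-list}'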

View file

@ -0,0 +1,195 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package codeformat
import (
"bytes"
"errors"
"io"
"os"
"sort"
"strings"
)
var importPackageGroupOrders = map[string]int{
"": 1, // internal
"code.gitea.io/gitea/": 2,
}
var errInvalidCommentBetweenImports = errors.New("comments between imported packages are invalid, please move comments to the end of the package line")
var (
importBlockBegin = []byte("\nimport (\n")
importBlockEnd = []byte("\n)")
)
type importLineParsed struct {
group string
pkg string
content string
}
func parseImportLine(line string) (*importLineParsed, error) {
il := &importLineParsed{content: line}
p1 := strings.IndexRune(line, '"')
if p1 == -1 {
return nil, errors.New("invalid import line: " + line)
}
p1++
p := strings.IndexRune(line[p1:], '"')
if p == -1 {
return nil, errors.New("invalid import line: " + line)
}
p2 := p1 + p
il.pkg = line[p1:p2]
pDot := strings.IndexRune(il.pkg, '.')
pSlash := strings.IndexRune(il.pkg, '/')
if pDot != -1 && pDot < pSlash {
il.group = "domain-package"
}
for groupName := range importPackageGroupOrders {
if groupName == "" {
continue // skip internal
}
if strings.HasPrefix(il.pkg, groupName) {
il.group = groupName
}
}
return il, nil
}
type (
importLineGroup []*importLineParsed
importLineGroupMap map[string]importLineGroup
)
func formatGoImports(contentBytes []byte) ([]byte, error) {
p1 := bytes.Index(contentBytes, importBlockBegin)
if p1 == -1 {
return nil, nil
}
p1 += len(importBlockBegin)
p := bytes.Index(contentBytes[p1:], importBlockEnd)
if p == -1 {
return nil, nil
}
p2 := p1 + p
importGroups := importLineGroupMap{}
r := bytes.NewBuffer(contentBytes[p1:p2])
eof := false
for !eof {
line, err := r.ReadString('\n')
eof = err == io.EOF
if err != nil && !eof {
return nil, err
}
line = strings.TrimSpace(line)
if line != "" {
if strings.HasPrefix(line, "//") || strings.HasPrefix(line, "/*") {
return nil, errInvalidCommentBetweenImports
}
importLine, err := parseImportLine(line)
if err != nil {
return nil, err
}
importGroups[importLine.group] = append(importGroups[importLine.group], importLine)
}
}
var groupNames []string
for groupName, importLines := range importGroups {
groupNames = append(groupNames, groupName)
sort.Slice(importLines, func(i, j int) bool {
return strings.Compare(importLines[i].pkg, importLines[j].pkg) < 0
})
}
sort.Slice(groupNames, func(i, j int) bool {
n1 := groupNames[i]
n2 := groupNames[j]
o1 := importPackageGroupOrders[n1]
o2 := importPackageGroupOrders[n2]
if o1 != 0 && o2 != 0 {
return o1 < o2
}
if o1 == 0 && o2 == 0 {
return strings.Compare(n1, n2) < 0
}
return o1 != 0
})
formattedBlock := bytes.Buffer{}
for _, groupName := range groupNames {
hasNormalImports := false
hasDummyImports := false
// non-dummy import comes first
for _, importLine := range importGroups[groupName] {
if strings.HasPrefix(importLine.content, "_") {
hasDummyImports = true
} else {
formattedBlock.WriteString("\t" + importLine.content + "\n")
hasNormalImports = true
}
}
// dummy (_ "pkg") comes later
if hasDummyImports {
if hasNormalImports {
formattedBlock.WriteString("\n")
}
for _, importLine := range importGroups[groupName] {
if strings.HasPrefix(importLine.content, "_") {
formattedBlock.WriteString("\t" + importLine.content + "\n")
}
}
}
formattedBlock.WriteString("\n")
}
formattedBlockBytes := bytes.TrimRight(formattedBlock.Bytes(), "\n")
var formattedBytes []byte
formattedBytes = append(formattedBytes, contentBytes[:p1]...)
formattedBytes = append(formattedBytes, formattedBlockBytes...)
formattedBytes = append(formattedBytes, contentBytes[p2:]...)
return formattedBytes, nil
}
// FormatGoImports format the imports by our rules (see unit tests)
func FormatGoImports(file string, doWriteFile bool) error {
f, err := os.Open(file)
if err != nil {
return err
}
var contentBytes []byte
{
defer f.Close()
contentBytes, err = io.ReadAll(f)
if err != nil {
return err
}
}
formattedBytes, err := formatGoImports(contentBytes)
if err != nil {
return err
}
if formattedBytes == nil {
return nil
}
if bytes.Equal(contentBytes, formattedBytes) {
return nil
}
if doWriteFile {
f, err = os.OpenFile(file, os.O_TRUNC|os.O_WRONLY, 0o644)
if err != nil {
return err
}
defer f.Close()
_, err = f.Write(formattedBytes)
return err
}
return err
}

View file

@ -0,0 +1,125 @@
// Copyright 2021 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package codeformat
import (
"testing"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
func TestFormatImportsSimple(t *testing.T) {
formatted, err := formatGoImports([]byte(`
package codeformat
import (
"github.com/stretchr/testify/assert"
"testing"
)
`))
expected := `
package codeformat
import (
"testing"
"github.com/stretchr/testify/assert"
)
`
require.NoError(t, err)
assert.Equal(t, expected, string(formatted))
}
func TestFormatImportsGroup(t *testing.T) {
// gofmt/goimports won't group the packages, for example, they produce such code:
// "bytes"
// "image"
// (a blank line)
// "fmt"
// "image/color/palette"
// our formatter does better, and these packages are grouped into one.
formatted, err := formatGoImports([]byte(`
package test
import (
"bytes"
"fmt"
"image"
"image/color"
_ "image/gif" // for processing gif images
_ "image/jpeg" // for processing jpeg images
_ "image/png" // for processing png images
"code.gitea.io/other/package"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"xorm.io/the/package"
"github.com/issue9/identicon"
"github.com/nfnt/resize"
"github.com/oliamb/cutter"
)
`))
expected := `
package test
import (
"bytes"
"fmt"
"image"
"image/color"
_ "image/gif" // for processing gif images
_ "image/jpeg" // for processing jpeg images
_ "image/png" // for processing png images
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/other/package"
"github.com/issue9/identicon"
"github.com/nfnt/resize"
"github.com/oliamb/cutter"
"xorm.io/the/package"
)
`
require.NoError(t, err)
assert.Equal(t, expected, string(formatted))
}
func TestFormatImportsInvalidComment(t *testing.T) {
// why we shouldn't write comments between imports: it breaks the grouping of imports
// for example:
// "pkg1"
// "pkg2"
// // a comment
// "pkgA"
// "pkgB"
// the comment splits the packages into two groups, pkg1/2 are sorted separately, pkgA/B are sorted separately
// we don't want such code, so the code should be:
// "pkg1"
// "pkg2"
// "pkgA" // a comment
// "pkgB"
_, err := formatGoImports([]byte(`
package test
import (
"image/jpeg"
// for processing gif images
"image/gif"
)
`))
require.ErrorIs(t, err, errInvalidCommentBetweenImports)
}

92
build/generate-bindata.go Normal file
View file

@ -0,0 +1,92 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"bytes"
"crypto/sha1"
"fmt"
"log"
"net/http"
"os"
"path/filepath"
"strconv"
"github.com/shurcooL/vfsgen"
)
func needsUpdate(dir, filename string) (bool, []byte) {
needRegen := false
_, err := os.Stat(filename)
if err != nil {
needRegen = true
}
oldHash, err := os.ReadFile(filename + ".hash")
if err != nil {
oldHash = []byte{}
}
hasher := sha1.New()
err = filepath.WalkDir(dir, func(path string, d os.DirEntry, err error) error {
if err != nil {
return err
}
info, err := d.Info()
if err != nil {
return err
}
_, _ = hasher.Write([]byte(d.Name()))
_, _ = hasher.Write([]byte(info.ModTime().String()))
_, _ = hasher.Write([]byte(strconv.FormatInt(info.Size(), 16)))
return nil
})
if err != nil {
return true, oldHash
}
newHash := hasher.Sum([]byte{})
if !bytes.Equal(oldHash, newHash) {
return true, newHash
}
return needRegen, newHash
}
func main() {
if len(os.Args) < 4 {
log.Fatal("Insufficient number of arguments. Need: directory packageName filename")
}
dir, packageName, filename := os.Args[1], os.Args[2], os.Args[3]
var useGlobalModTime bool
if len(os.Args) == 5 {
useGlobalModTime, _ = strconv.ParseBool(os.Args[4])
}
update, newHash := needsUpdate(dir, filename)
if !update {
fmt.Printf("bindata for %s already up-to-date\n", packageName)
return
}
fmt.Printf("generating bindata for %s\n", packageName)
var fsTemplates http.FileSystem = http.Dir(dir)
err := vfsgen.Generate(fsTemplates, vfsgen.Options{
PackageName: packageName,
BuildTags: "bindata",
VariableName: "Assets",
Filename: filename,
UseGlobalModTime: useGlobalModTime,
})
if err != nil {
log.Fatalf("%v\n", err)
}
_ = os.WriteFile(filename+".hash", newHash, 0o666)
}
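An illustrative invocation of the bindata generator above; the directory, package name and output path are placeholders (the real values come from the build targets that call this tool), and the trailing argument toggles UseGlobalModTime:
    # regenerate embedded assets for a hypothetical "options" package
    go run build/generate-bindata.go options options modules/options/bindata.go true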

View file

@ -0,0 +1,203 @@
// Copyright 2024 James Hatfield
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"bufio"
"bytes"
"crypto"
"flag"
"fmt"
"go/format"
"io"
"log"
"net/http"
"os"
"regexp"
"strings"
)
const disposableEmailListURL string = "https://raw.githubusercontent.com/disposable-email-domains/disposable-email-domains/%s/disposable_email_blocklist.conf"
var (
gitRef *string = flag.String("r", "master", "Git reference of the domain list version")
outPat *string = flag.String("o", "modules/setting/disposable_email_domain_data.go", "Output path")
check *bool = flag.Bool("check", false, "Check if the current output file matches the current upstream list")
)
func main() {
flag.Parse()
if *check {
// read in the local copy of the domain list
local, err := get_local_file()
if err != nil {
log.Fatalf("File Read Error: %v", err)
}
// generate the remote copy of the domain list
remote, err := generate()
if err != nil {
log.Fatalf("Generation Error: %v", err)
}
// strip the comments from both (so we don't fail simply due to a git ref difference)
local = strip_comments(local)
remote = strip_comments(remote)
// generate the hash of the local copy
local_sha, err := hash(local)
if err != nil {
log.Fatalf("Local Hash Generation Error: %v", err)
}
// generate the hash of the remote copy
remote_sha, err := hash(remote)
if err != nil {
log.Fatalf("Remote Hash Generation Error: %v", err)
}
// if the hashes don't match, the local copy needs to be updated
if local_sha != remote_sha {
log.Fatalf("Disposable email domain list needs to be updated!! \"make lint-disposable-emails-fix\"")
}
} else {
// generate the source code (array of domains)
res, err := generate()
if err != nil {
log.Fatalf("Generation Error: %v", err)
}
// write result to a file
err = os.WriteFile(*outPat, res, 0o644)
if err != nil {
log.Fatalf("File Write Error: %v", err)
}
}
}
func strip_comments(data []byte) []byte {
result := make([]byte, 0, len(data))
re := regexp.MustCompile(`^\W*//.*$`)
for _, line := range bytes.Split(data, []byte("\n")) {
if !re.Match(line) {
result = append(result, line...)
}
}
return result
}
func hash(data []byte) (string, error) {
var err error
hash := crypto.SHA3_256.New()
_, err = hash.Write(data)
if err != nil {
return "", err
}
return fmt.Sprintf("%x", hash.Sum(nil)), err
}
func get_local_file() ([]byte, error) {
var err error
f, err := os.Open(*outPat)
if err != nil {
return nil, err
}
defer f.Close()
data, err := io.ReadAll(f)
if err != nil {
return nil, err
}
return data, err
}
func get_remote() ([]string, error) {
var err error
var url string = fmt.Sprintf(disposableEmailListURL, *gitRef)
// download the domain list
res, err := http.Get(url)
if err != nil {
return nil, err
}
defer res.Body.Close()
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
// go through all entries (1 domain per line)
scanner := bufio.NewScanner(bytes.NewReader(body))
var arrDomains []string
for scanner.Scan() {
line := scanner.Text()
arrDomains = append(arrDomains, line)
}
return arrDomains, err
}
func generate() ([]byte, error) {
var err error
var url string = fmt.Sprintf(disposableEmailListURL, *gitRef)
// download the domains list
arrDomains, err := get_remote()
if err != nil {
return nil, err
}
// build the string in a readable way
var sb strings.Builder
_, err = sb.WriteString("[]string{\n")
if err != nil {
return nil, err
}
for _, item := range arrDomains {
_, err = sb.WriteString(fmt.Sprintf("\t%q,\n", item))
if err != nil {
return nil, err
}
}
_, err = sb.WriteString("}")
if err != nil {
return nil, err
}
// insert the values into file
final := fmt.Sprintf(hdr, url, sb.String())
return format.Source([]byte(final))
}
const hdr = `
// Copyright 2024 James Hatfield
// SPDX-License-Identifier: MIT
//
// Code generated by build/generate-disposable-email.go. DO NOT EDIT
// Sourced from %s
package setting
import "sync"
var DisposableEmailDomains = sync.OnceValue(func() []string {
return %s
})
`
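A usage sketch for the blocklist generator above, relying only on the defaults of the `-r`, `-o` and `-check` flags it declares:
    # regenerate the blocklist with the default ref and output path, then verify it matches upstream
    go run build/generate-disposable-email.go
    go run build/generate-disposable-email.go -check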

219
build/generate-emoji.go Normal file
View file

@ -0,0 +1,219 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// Copyright 2015 Kenneth Shaw
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"flag"
"fmt"
"go/format"
"io"
"log"
"net/http"
"os"
"regexp"
"sort"
"strconv"
"strings"
"unicode/utf8"
"code.gitea.io/gitea/modules/json"
)
const (
gemojiURL = "https://raw.githubusercontent.com/github/gemoji/master/db/emoji.json"
maxUnicodeVersion = 15
)
var flagOut = flag.String("o", "modules/emoji/emoji_data.go", "out")
// Gemoji is a set of emoji data.
type Gemoji []Emoji
// Emoji represents a single emoji and associated data.
type Emoji struct {
Emoji string `json:"emoji"`
Description string `json:"description,omitempty"`
Aliases []string `json:"aliases"`
UnicodeVersion string `json:"unicode_version,omitempty"`
SkinTones bool `json:"skin_tones,omitempty"`
}
// Don't include some fields in JSON
func (e Emoji) MarshalJSON() ([]byte, error) {
type emoji Emoji
x := emoji(e)
x.UnicodeVersion = ""
x.Description = ""
x.SkinTones = false
return json.Marshal(x)
}
func main() {
flag.Parse()
// generate data
buf, err := generate()
if err != nil {
log.Fatalf("generate err: %v", err)
}
// write
err = os.WriteFile(*flagOut, buf, 0o644)
if err != nil {
log.Fatalf("WriteFile err: %v", err)
}
}
var replacer = strings.NewReplacer(
"main.Gemoji", "Gemoji",
"main.Emoji", "\n",
"}}", "},\n}",
", Description:", ", ",
", Aliases:", ", ",
", UnicodeVersion:", ", ",
", SkinTones:", ", ",
)
var emojiRE = regexp.MustCompile(`\{Emoji:"([^"]*)"`)
func generate() ([]byte, error) {
// load gemoji data
res, err := http.Get(gemojiURL)
if err != nil {
return nil, err
}
defer res.Body.Close()
// read all
body, err := io.ReadAll(res.Body)
if err != nil {
return nil, err
}
// unmarshal
var data Gemoji
err = json.Unmarshal(body, &data)
if err != nil {
return nil, err
}
skinTones := make(map[string]string)
skinTones["\U0001f3fb"] = "Light Skin Tone"
skinTones["\U0001f3fc"] = "Medium-Light Skin Tone"
skinTones["\U0001f3fd"] = "Medium Skin Tone"
skinTones["\U0001f3fe"] = "Medium-Dark Skin Tone"
skinTones["\U0001f3ff"] = "Dark Skin Tone"
var tmp Gemoji
// filter out emoji that require greater than max unicode version
for i := range data {
val, _ := strconv.ParseFloat(data[i].UnicodeVersion, 64)
if int(val) <= maxUnicodeVersion {
tmp = append(tmp, data[i])
}
}
data = tmp
sort.Slice(data, func(i, j int) bool {
return data[i].Aliases[0] < data[j].Aliases[0]
})
aliasMap := make(map[string]int, len(data))
for i, e := range data {
if e.Emoji == "" || len(e.Aliases) == 0 {
continue
}
for _, a := range e.Aliases {
if a == "" {
continue
}
aliasMap[a] = i
}
}
// Forgejo customizations
i, ok := aliasMap["tada"]
if ok {
data[i].Aliases = append(data[i].Aliases, "hooray")
}
i, ok = aliasMap["laughing"]
if ok {
data[i].Aliases = append(data[i].Aliases, "laugh")
}
// write a JSON file to use with tribute (write before adding skin tones since we can't support them there yet)
file, _ := json.Marshal(data)
_ = os.WriteFile("assets/emoji.json", file, 0o644)
// Add skin tones to emoji that support it
var (
s []string
newEmoji string
newDescription string
newData Emoji
)
for i := range data {
if data[i].SkinTones {
for k, v := range skinTones {
s = strings.Split(data[i].Emoji, "")
if utf8.RuneCountInString(data[i].Emoji) == 1 {
s = append(s, k)
} else {
// insert into slice after first element because all emoji that support skin tones
// have that modifier placed at this spot
s = append(s, "")
copy(s[2:], s[1:])
s[1] = k
}
newEmoji = strings.Join(s, "")
newDescription = data[i].Description + ": " + v
newAlias := data[i].Aliases[0] + "_" + strings.ReplaceAll(v, " ", "_")
newData = Emoji{newEmoji, newDescription, []string{newAlias}, "12.0", false}
data = append(data, newData)
}
}
}
sort.Slice(data, func(i, j int) bool {
return data[i].Aliases[0] < data[j].Aliases[0]
})
// add header
str := replacer.Replace(fmt.Sprintf(hdr, gemojiURL, data))
// change the format of the unicode string
str = emojiRE.ReplaceAllStringFunc(str, func(s string) string {
var err error
s, err = strconv.Unquote(s[len("{Emoji:"):])
if err != nil {
panic(err)
}
return "{" + strconv.QuoteToASCII(s)
})
// format
return format.Source([]byte(str))
}
const hdr = `
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package emoji
// Code generated by build/generate-emoji.go. DO NOT EDIT.
// Sourced from %s
var GemojiData = %#v
`
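A usage sketch for the emoji generator above; with no flags it writes the default `-o` path (modules/emoji/emoji_data.go) as well as assets/emoji.json:
    # refresh the generated emoji data from the upstream gemoji database
    go run build/generate-emoji.go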

View file

@ -0,0 +1,126 @@
//go:build ignore
package main
import (
"archive/tar"
"compress/gzip"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"path"
"path/filepath"
"strings"
"code.gitea.io/gitea/modules/util"
)
func main() {
var (
prefix = "gitea-gitignore"
url = "https://api.github.com/repos/github/gitignore/tarball"
githubApiToken = ""
githubUsername = ""
destination = ""
)
flag.StringVar(&destination, "dest", "options/gitignore/", "destination for the gitignores")
flag.StringVar(&githubUsername, "username", "", "github username")
flag.StringVar(&githubApiToken, "token", "", "github api token")
flag.Parse()
file, err := os.CreateTemp(os.TempDir(), prefix)
if err != nil {
log.Fatalf("Failed to create temp file. %s", err)
}
defer util.Remove(file.Name())
req, err := http.NewRequest("GET", url, nil)
if err != nil {
log.Fatalf("Failed to download archive. %s", err)
}
if len(githubApiToken) > 0 && len(githubUsername) > 0 {
req.SetBasicAuth(githubUsername, githubApiToken)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
log.Fatalf("Failed to download archive. %s", err)
}
defer resp.Body.Close()
if _, err := io.Copy(file, resp.Body); err != nil {
log.Fatalf("Failed to copy archive to file. %s", err)
}
if _, err := file.Seek(0, 0); err != nil {
log.Fatalf("Failed to reset seek on archive. %s", err)
}
gz, err := gzip.NewReader(file)
if err != nil {
log.Fatalf("Failed to gunzip the archive. %s", err)
}
tr := tar.NewReader(gz)
filesToCopy := make(map[string]string, 0)
for {
hdr, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
log.Fatalf("Failed to iterate archive. %s", err)
}
if filepath.Ext(hdr.Name) != ".gitignore" {
continue
}
if hdr.Typeflag == tar.TypeSymlink {
fmt.Printf("Found symlink %s -> %s\n", hdr.Name, hdr.Linkname)
filesToCopy[strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")] = strings.TrimSuffix(filepath.Base(hdr.Linkname), ".gitignore")
continue
}
out, err := os.Create(path.Join(destination, strings.TrimSuffix(filepath.Base(hdr.Name), ".gitignore")))
if err != nil {
log.Fatalf("Failed to create new file. %s", err)
}
defer out.Close()
if _, err := io.Copy(out, tr); err != nil {
log.Fatalf("Failed to write new file. %s", err)
} else {
fmt.Printf("Written %s\n", out.Name())
}
}
for dst, src := range filesToCopy {
// Read all content of src to data
src = path.Join(destination, src)
data, err := os.ReadFile(src)
if err != nil {
log.Fatalf("Failed to read src file. %s", err)
}
// Write data to dst
dst = path.Join(destination, dst)
err = os.WriteFile(dst, data, 0o644)
if err != nil {
log.Fatalf("Failed to write new file. %s", err)
}
fmt.Printf("Written (copy of %s) %s\n", src, dst)
}
fmt.Println("Done")
}
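A usage sketch based on the flags declared above; the optional --username/--token flags only raise the GitHub API rate limit and are omitted here:
    # refresh options/gitignore/ from the github/gitignore tarball (credentials optional)
    go run build/generate-gitignores.go --dest options/gitignore/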

View file

@ -0,0 +1,132 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"encoding/json"
"fmt"
"io/fs"
"os"
"path"
"path/filepath"
"regexp"
"sort"
"strings"
"code.gitea.io/gitea/modules/container"
)
// regexp is based on go-license, excluding README and NOTICE
// https://github.com/google/go-licenses/blob/master/licenses/find.go
var licenseRe = regexp.MustCompile(`^(?i)((UN)?LICEN(S|C)E|COPYING).*$`)
type LicenseEntry struct {
Name string `json:"name"`
Path string `json:"path"`
LicenseText string `json:"licenseText"`
}
func main() {
if len(os.Args) != 3 {
fmt.Println("usage: go run generate-go-licenses.go <base-dir> <out-json-file>")
os.Exit(1)
}
base, out := os.Args[1], os.Args[2]
// Add ext for excluded files because license_test.go will be included for some reason.
// And there are more files that should be excluded, check with:
//
// go run github.com/google/go-licenses@v1.6.0 save . --force --save_path=.go-licenses 2>/dev/null
// find .go-licenses -type f | while read FILE; do echo "${$(basename $FILE)##*.}"; done | sort -u
// AUTHORS
// COPYING
// LICENSE
// Makefile
// NOTICE
// gitignore
// go
// md
// mod
// sum
// toml
// txt
// yml
//
// It could be removed once we have a better regex.
excludedExt := container.SetOf(".gitignore", ".go", ".mod", ".sum", ".toml", ".yml")
var paths []string
err := filepath.WalkDir(base, func(path string, entry fs.DirEntry, err error) error {
if err != nil {
return err
}
if entry.IsDir() || !licenseRe.MatchString(entry.Name()) || excludedExt.Contains(filepath.Ext(entry.Name())) {
return nil
}
paths = append(paths, path)
return nil
})
if err != nil {
panic(err)
}
sort.Strings(paths)
var entries []LicenseEntry
{
licenseText, err := os.ReadFile("LICENSE")
if err != nil {
panic(err)
}
entries = append(entries, LicenseEntry{
Name: "codeberg.org/forgejo/forgejo",
Path: "codeberg.org/forgejo/forgejo/GPL-3.0-or-later",
LicenseText: string(licenseText),
})
}
for _, filePath := range paths {
licenseText, err := os.ReadFile(filePath)
if err != nil {
panic(err)
}
pkgPath := filepath.ToSlash(filePath)
pkgPath = strings.TrimPrefix(pkgPath, base+"/")
pkgName := path.Dir(pkgPath)
// There might be a bug somewhere in go-licenses that sometimes interprets the
// root package as "." and sometimes as "code.gitea.io/gitea". Workaround by
// removing both of them for the sake of stable output.
if pkgName == "." || pkgName == "code.gitea.io/gitea" {
continue
}
entries = append(entries, LicenseEntry{
Name: pkgName,
Path: pkgPath,
LicenseText: string(licenseText),
})
}
jsonBytes, err := json.MarshalIndent(entries, "", " ")
if err != nil {
panic(err)
}
// Ensure file has a final newline
if jsonBytes[len(jsonBytes)-1] != '\n' {
jsonBytes = append(jsonBytes, '\n')
}
err = os.WriteFile(out, jsonBytes, 0o644)
if err != nil {
panic(err)
}
}
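A usage sketch combining this tool's own usage string with the `.go-licenses` save path mentioned in its comments and the `assets/go-licenses.json` file added by this commit:
    # let go-licenses save the dependency license files, then aggregate them into one JSON file
    go run github.com/google/go-licenses@v1.6.0 save . --force --save_path=.go-licenses
    go run build/generate-go-licenses.go .go-licenses assets/go-licenses.json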

122
build/generate-licenses.go Normal file
View file

@ -0,0 +1,122 @@
//go:build ignore
package main
import (
"archive/tar"
"compress/gzip"
"flag"
"fmt"
"io"
"log"
"net/http"
"os"
"path"
"path/filepath"
"strings"
"code.gitea.io/gitea/modules/util"
)
func main() {
var (
prefix = "gitea-licenses"
url = "https://api.github.com/repos/spdx/license-list-data/tarball"
githubApiToken = ""
githubUsername = ""
destination = ""
)
flag.StringVar(&destination, "dest", "options/license/", "destination for the licenses")
flag.StringVar(&githubUsername, "username", "", "github username")
flag.StringVar(&githubApiToken, "token", "", "github api token")
flag.Parse()
file, err := os.CreateTemp(os.TempDir(), prefix)
if err != nil {
log.Fatalf("Failed to create temp file. %s", err)
}
defer util.Remove(file.Name())
if err := os.RemoveAll(destination); err != nil {
log.Fatalf("Cannot clean destination folder: %v", err)
}
if err := os.MkdirAll(destination, 0o755); err != nil {
log.Fatalf("Cannot create destination: %v", err)
}
req, err := http.NewRequest("GET", url, nil)
if err != nil {
log.Fatalf("Failed to download archive. %s", err)
}
if len(githubApiToken) > 0 && len(githubUsername) > 0 {
req.SetBasicAuth(githubUsername, githubApiToken)
}
resp, err := http.DefaultClient.Do(req)
if err != nil {
log.Fatalf("Failed to download archive. %s", err)
}
defer resp.Body.Close()
if _, err := io.Copy(file, resp.Body); err != nil {
log.Fatalf("Failed to copy archive to file. %s", err)
}
if _, err := file.Seek(0, 0); err != nil {
log.Fatalf("Failed to reset seek on archive. %s", err)
}
gz, err := gzip.NewReader(file)
if err != nil {
log.Fatalf("Failed to gunzip the archive. %s", err)
}
tr := tar.NewReader(gz)
for {
hdr, err := tr.Next()
if err == io.EOF {
break
}
if err != nil {
log.Fatalf("Failed to iterate archive. %s", err)
}
if !strings.Contains(hdr.Name, "/text/") {
continue
}
if filepath.Ext(hdr.Name) != ".txt" {
continue
}
if strings.HasPrefix(filepath.Base(hdr.Name), "README") {
continue
}
if strings.HasPrefix(filepath.Base(hdr.Name), "deprecated_") {
continue
}
out, err := os.Create(path.Join(destination, strings.TrimSuffix(filepath.Base(hdr.Name), ".txt")))
if err != nil {
log.Fatalf("Failed to create new file. %s", err)
}
defer out.Close()
if _, err := io.Copy(out, tr); err != nil {
log.Fatalf("Failed to write new file. %s", err)
} else {
fmt.Printf("Written %s\n", out.Name())
}
}
fmt.Println("Done")
}
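A usage sketch mirroring the gitignore generator; only the flags declared above are used, with the destination left at its default value:
    # refresh options/license/ from the SPDX license-list-data tarball
    go run build/generate-licenses.go --dest options/license/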

155
build/lint-locale.go Normal file
View file

@ -0,0 +1,155 @@
// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//nolint:forbidigo
package main
import (
"fmt"
"html"
"io/fs"
"os"
"path/filepath"
"regexp"
"slices"
"strings"
"github.com/microcosm-cc/bluemonday"
"github.com/sergi/go-diff/diffmatchpatch"
"gopkg.in/ini.v1" //nolint:depguard
)
var (
policy *bluemonday.Policy
tagRemover *strings.Replacer
safeURL = "https://TO-BE-REPLACED.COM"
// Matches href="", href="#", href="%s", href="#%s", href="%[1]s" and href="#%[1]s".
placeHolderRegex = regexp.MustCompile(`href="#?(%s|%\[\d\]s)?"`)
)
func initBlueMondayPolicy() {
policy = bluemonday.NewPolicy()
policy.RequireParseableURLs(true)
policy.AllowURLSchemes("https")
// Only allow safe URL on href.
// Only allow target="_blank".
// Only allow rel="nopener noreferrer", rel="noopener" and rel="noreferrer".
// Only allow placeholder on id and class.
policy.AllowAttrs("href").Matching(regexp.MustCompile("^" + regexp.QuoteMeta(safeURL) + "$")).OnElements("a")
policy.AllowAttrs("target").Matching(regexp.MustCompile("^_blank$")).OnElements("a")
policy.AllowAttrs("rel").Matching(regexp.MustCompile("^(noopener|noreferrer|noopener noreferrer)$")).OnElements("a")
policy.AllowAttrs("id", "class").Matching(regexp.MustCompile(`^%s|%\[\d\]s$`)).OnElements("a")
// Only allow positional placeholder as class.
positionalPlaceholderRe := regexp.MustCompile(`^%\[\d\]s$`)
policy.AllowAttrs("class").Matching(positionalPlaceholderRe).OnElements("strong")
policy.AllowAttrs("id").Matching(positionalPlaceholderRe).OnElements("code")
// Allowed elements with no attributes. Must be a recognized tagname.
policy.AllowElements("strong", "br", "b", "strike", "code", "i")
// TODO: Remove <c> in `actions.workflow.dispatch.trigger_found`.
policy.AllowNoAttrs().OnElements("c")
}
func initRemoveTags() {
oldnew := []string{}
for _, el := range []string{
"email@example.com", "correu@example.com", "epasts@domens.lv", "email@exemplo.com", "eposta@ornek.com", "email@példa.hu", "email@esempio.it",
"user", "utente", "lietotājs", "gebruiker", "usuário", "Benutzer", "Bruker", "bruger",
"server", "servidor", "kiszolgáló", "serveris",
"label", "etichetta", "etiķete", "rótulo", "Label", "utilizador", "etiket", "iezīme",
} {
oldnew = append(oldnew, "<"+el+">", "REPLACED-TAG")
}
tagRemover = strings.NewReplacer(oldnew...)
}
func preprocessTranslationValue(value string) string {
// href should be a parsable URL, replace placeholder strings with a safe url.
value = placeHolderRegex.ReplaceAllString(value, `href="`+safeURL+`"`)
// Remove tags that aren't tags but will be parsed as tags. We already know they are safe and sound.
value = tagRemover.Replace(value)
return value
}
func checkLocaleContent(localeContent []byte) []string {
// Same configuration as Forgejo uses.
cfg := ini.Empty(ini.LoadOptions{
IgnoreContinuation: true,
})
cfg.NameMapper = ini.SnackCase
if err := cfg.Append(localeContent); err != nil {
panic(err)
}
dmp := diffmatchpatch.New()
errors := []string{}
for _, section := range cfg.Sections() {
for _, key := range section.Keys() {
var trKey string
if section.Name() == "" || section.Name() == "DEFAULT" || section.Name() == "common" {
trKey = key.Name()
} else {
trKey = section.Name() + "." + key.Name()
}
keyValue := preprocessTranslationValue(key.Value())
if html.UnescapeString(policy.Sanitize(keyValue)) != keyValue {
// Create a nice diff of the difference.
diffs := dmp.DiffMain(keyValue, html.UnescapeString(policy.Sanitize(keyValue)), false)
diffs = dmp.DiffCleanupSemantic(diffs)
diffs = dmp.DiffCleanupEfficiency(diffs)
errors = append(errors, trKey+": "+dmp.DiffPrettyText(diffs))
}
}
}
return errors
}
func main() {
initBlueMondayPolicy()
initRemoveTags()
localeDir := filepath.Join("options", "locale")
localeFiles, err := os.ReadDir(localeDir)
if err != nil {
panic(err)
}
if !slices.ContainsFunc(localeFiles, func(e fs.DirEntry) bool { return strings.HasSuffix(e.Name(), ".ini") }) {
fmt.Println("No locale files found")
os.Exit(1)
}
exitCode := 0
for _, localeFile := range localeFiles {
if !strings.HasSuffix(localeFile.Name(), ".ini") {
continue
}
localeContent, err := os.ReadFile(filepath.Join(localeDir, localeFile.Name()))
if err != nil {
panic(err)
}
if err := checkLocaleContent(localeContent); len(err) > 0 {
fmt.Println(localeFile.Name())
fmt.Println(strings.Join(err, "\n"))
fmt.Println()
exitCode = 1
}
}
os.Exit(exitCode)
}
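A usage sketch for the locale linter above; it reads options/locale/*.ini relative to the working directory, so it is assumed to be run from the repository root:
    # exits non-zero when any translation contains disallowed HTML
    go run build/lint-locale.go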

65
build/lint-locale_test.go Normal file
View file

@ -0,0 +1,65 @@
// Copyright 2024 The Forgejo Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package main
import (
"testing"
"github.com/stretchr/testify/assert"
)
func TestLocalizationPolicy(t *testing.T) {
initBlueMondayPolicy()
initRemoveTags()
t.Run("Remove tags", func(t *testing.T) {
assert.Empty(t, checkLocaleContent([]byte(`hidden_comment_types_description = Comment types checked here will not be shown inside issue pages. Checking "Label" for example removes all "<user> added/removed <label>" comments.`)))
assert.EqualValues(t, []string{"key: \x1b[31m<not-an-allowed-key>\x1b[0m REPLACED-TAG"}, checkLocaleContent([]byte(`key = "<not-an-allowed-key> <label>"`)))
assert.EqualValues(t, []string{"key: \x1b[31m<user@example.com>\x1b[0m REPLACED-TAG"}, checkLocaleContent([]byte(`key = "<user@example.com> <email@example.com>"`)))
assert.EqualValues(t, []string{"key: \x1b[31m<tag>\x1b[0m REPLACED-TAG \x1b[31m</tag>\x1b[0m"}, checkLocaleContent([]byte(`key = "<tag> <email@example.com> </tag>"`)))
})
t.Run("Specific exception", func(t *testing.T) {
assert.Empty(t, checkLocaleContent([]byte(`workflow.dispatch.trigger_found = This workflow has a <c>workflow_dispatch</c> event trigger.`)))
assert.Empty(t, checkLocaleContent([]byte(`pulls.title_desc_one = wants to merge %[1]d commit from <code>%[2]s</code> into <code id="%[4]s">%[3]s</code>`)))
assert.Empty(t, checkLocaleContent([]byte(`editor.commit_directly_to_this_branch = Commit directly to the <strong class="%[2]s">%[1]s</strong> branch.`)))
assert.EqualValues(t, []string{"workflow.dispatch.trigger_found: This workflow has a \x1b[31m<d>\x1b[0mworkflow_dispatch\x1b[31m</d>\x1b[0m event trigger."}, checkLocaleContent([]byte(`workflow.dispatch.trigger_found = This workflow has a <d>workflow_dispatch</d> event trigger.`)))
assert.EqualValues(t, []string{"key: <code\x1b[31m id=\"branch_targe\"\x1b[0m>%[3]s</code>"}, checkLocaleContent([]byte(`key = <code id="branch_targe">%[3]s</code>`)))
assert.EqualValues(t, []string{"key: <a\x1b[31m class=\"ui sh\"\x1b[0m href=\"https://TO-BE-REPLACED.COM\">"}, checkLocaleContent([]byte(`key = <a class="ui sh" href="%[3]s">`)))
assert.EqualValues(t, []string{"key: <a\x1b[31m class=\"js-click-me\"\x1b[0m href=\"https://TO-BE-REPLACED.COM\">"}, checkLocaleContent([]byte(`key = <a class="js-click-me" href="%[3]s">`)))
assert.EqualValues(t, []string{"key: <strong\x1b[31m class=\"branch-target\"\x1b[0m>%[1]s</strong>"}, checkLocaleContent([]byte(`key = <strong class="branch-target">%[1]s</strong>`)))
})
t.Run("General safe tags", func(t *testing.T) {
assert.Empty(t, checkLocaleContent([]byte("error404 = The page you are trying to reach either <strong>does not exist</strong> or <strong>you are not authorized</strong> to view it.")))
assert.Empty(t, checkLocaleContent([]byte("teams.specific_repositories_helper = Members will only have access to repositories explicitly added to the team. Selecting this <strong>will not</strong> automatically remove repositories already added with <i>All repositories</i>.")))
assert.Empty(t, checkLocaleContent([]byte("sqlite_helper = File path for the SQLite3 database.<br>Enter an absolute path if you run Forgejo as a service.")))
assert.Empty(t, checkLocaleContent([]byte("hi_user_x = Hi <b>%s</b>,")))
assert.EqualValues(t, []string{"error404: The page you are trying to reach either <strong\x1b[31m title='aaa'\x1b[0m>does not exist</strong> or <strong>you are not authorized</strong> to view it."}, checkLocaleContent([]byte("error404 = The page you are trying to reach either <strong title='aaa'>does not exist</strong> or <strong>you are not authorized</strong> to view it.")))
})
t.Run("<a>", func(t *testing.T) {
assert.Empty(t, checkLocaleContent([]byte(`admin.new_user.text = Please <a href="%s">click here</a> to manage this user from the admin panel.`)))
assert.Empty(t, checkLocaleContent([]byte(`access_token_desc = Selected token permissions limit authorization only to the corresponding <a href="%[1]s" target="_blank">API</a> routes. Read the <a href="%[2]s" target="_blank">documentation</a> for more information.`)))
assert.Empty(t, checkLocaleContent([]byte(`webauthn_desc = Security keys are hardware devices containing cryptographic keys. They can be used for two-factor authentication. Security keys must support the <a rel="noreferrer" target="_blank" href="%s">WebAuthn Authenticator</a> standard.`)))
assert.Empty(t, checkLocaleContent([]byte("issues.closed_at = `closed this issue <a id=\"%[1]s\" href=\"#%[1]s\">%[2]s</a>`")))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"https://example.com\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="https://example.com">`)))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"javascript:alert('1')\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="javascript:alert('1')">`)))
assert.EqualValues(t, []string{"key: <a href=\"https://TO-BE-REPLACED.COM\"\x1b[31m download\x1b[0m>"}, checkLocaleContent([]byte(`key = <a href="%s" download>`)))
assert.EqualValues(t, []string{"key: <a href=\"https://TO-BE-REPLACED.COM\"\x1b[31m target=\"_self\"\x1b[0m>"}, checkLocaleContent([]byte(`key = <a href="%s" target="_self">`)))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"https://example.com/%s\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="https://example.com/%s">`)))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"https://example.com/?q=%s\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="https://example.com/?q=%s">`)))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"%s/open-redirect\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="%s/open-redirect">`)))
assert.EqualValues(t, []string{"key: \x1b[31m<a href=\"%s?q=open-redirect\">\x1b[0m"}, checkLocaleContent([]byte(`key = <a href="%s?q=open-redirect">`)))
})
t.Run("Escaped HTML characters", func(t *testing.T) {
assert.Empty(t, checkLocaleContent([]byte("activity.git_stats_push_to_branch = `إلى %s و\"`")))
assert.EqualValues(t, []string{"key: و\x1b[31m&nbsp\x1b[0m\x1b[32m\u00a0\x1b[0m"}, checkLocaleContent([]byte(`key = و&nbsp;`)))
})
}

View file

@ -0,0 +1,15 @@
// Copyright 2024 The Forgejo Authors c/o Codeberg e.V.. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
//go:build ignore
package main
import (
"fmt"
)
func main() {
fmt.Println("NOT NEEDED: THIS IS A NOOP AS OF Forgejo 7.0 BUT KEPT FOR BACKWARD COMPATIBILITY")
}

20
build/test-echo.go Normal file
View file

@ -0,0 +1,20 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
//go:build ignore
package main
import (
"fmt"
"io"
"os"
)
func main() {
_, err := io.Copy(os.Stdout, os.Stdin)
if err != nil {
fmt.Fprintf(os.Stderr, "Error: %v", err)
os.Exit(1)
}
}
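A usage sketch for the echo helper above; it simply copies stdin to stdout, so any piped input works:
    # the input should come back unchanged
    echo "hello" | go run build/test-echo.go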

24
build/test-env-check.sh Executable file
View file

@ -0,0 +1,24 @@
#!/bin/sh
set -e
if [ ! -f ./build/test-env-check.sh ]; then
echo "${0} can only be executed in gitea source root directory"
exit 1
fi
echo "check uid ..."
# the uid of gitea defined in "https://gitea.com/gitea/test-env" is 1000
gitea_uid=$(id -u gitea)
if [ "$gitea_uid" != "1000" ]; then
echo "The uid of linux user 'gitea' is expected to be 1000, but it is $gitea_uid"
exit 1
fi
cur_uid=$(id -u)
if [ "$cur_uid" != "0" -a "$cur_uid" != "$gitea_uid" ]; then
echo "The uid of current linux user is expected to be 0 or $gitea_uid, but it is $cur_uid"
exit 1
fi

11
build/test-env-prepare.sh Executable file
View file

@ -0,0 +1,11 @@
#!/bin/sh
set -e
if [ ! -f ./build/test-env-prepare.sh ]; then
echo "${0} can only be executed in gitea source root directory"
exit 1
fi
echo "change the owner of files to gitea ..."
chown -R gitea:gitea .

52
build/update-locales.sh Executable file
View file

@ -0,0 +1,52 @@
#!/bin/sh
# this script runs in the alpine image, which only has the `sh` shell
set +e
if sed --version 2>/dev/null | grep -q GNU; then
SED_INPLACE="sed -i"
else
SED_INPLACE="sed -i ''"
fi
set -e
if [ ! -f ./options/locale/locale_en-US.ini ]; then
echo "please run this script in the root directory of the project"
exit 1
fi
mv ./options/locale/locale_en-US.ini ./options/
# the "ini" library for locale has many quirks; its behavior is different from Crowdin.
# see i18n_test.go for more details
# this script helps to unquote the Crowdin outputs for the quirky ini library
# * find all `key="...\"..."` lines
# * remove the leading quote
# * remove the trailing quote
# * unescape the quotes
# * eg: key="...\"..." => key=..."...
$SED_INPLACE -r -e '/^[-.A-Za-z0-9_]+[ ]*=[ ]*".*"$/ {
s/^([-.A-Za-z0-9_]+)[ ]*=[ ]*"/\1=/
s/"$//
s/\\"/"/g
}' ./options/locale/*.ini
# * if the escaped line is incomplete like `key="...` or `key=..."`, quote it with backticks
# * eg: key="... => key=`"...`
# * eg: key=..." => key=`..."`
$SED_INPLACE -r -e 's/^([-.A-Za-z0-9_]+)[ ]*=[ ]*(".*[^"])$/\1=`\2`/' ./options/locale/*.ini
$SED_INPLACE -r -e 's/^([-.A-Za-z0-9_]+)[ ]*=[ ]*([^"].*")$/\1=`\2`/' ./options/locale/*.ini
# Remove translation under 25% of en_us
baselines=$(wc -l "./options/locale_en-US.ini" | cut -d" " -f1)
baselines=$((baselines / 4))
for filename in ./options/locale/*.ini; do
lines=$(wc -l "$filename" | cut -d" " -f1)
if [ $lines -lt $baselines ]; then
echo "Removing $filename: $lines/$baselines"
rm "$filename"
fi
done
mv ./options/locale_en-US.ini ./options/locale/
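A usage sketch, assuming the script is run from the repository root as its own guard clause requires:
    # normalize Crowdin output and drop translations below 25% of the en-US line count
    ./build/update-locales.sh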

55
cmd/actions.go Normal file
View file

@ -0,0 +1,55 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"fmt"
"code.gitea.io/gitea/modules/private"
"code.gitea.io/gitea/modules/setting"
"github.com/urfave/cli/v2"
)
var (
// CmdActions represents the available actions sub-commands.
CmdActions = &cli.Command{
Name: "actions",
Usage: "Manage Forgejo Actions",
Subcommands: []*cli.Command{
subcmdActionsGenRunnerToken,
},
}
subcmdActionsGenRunnerToken = &cli.Command{
Name: "generate-runner-token",
Usage: "Generate a new token for a runner to use to register with the server",
Action: runGenerateActionsRunnerToken,
Aliases: []string{"grt"},
Flags: []cli.Flag{
&cli.StringFlag{
Name: "scope",
Aliases: []string{"s"},
Value: "",
Usage: "{owner}[/{repo}] - leave empty for a global runner",
},
},
}
)
func runGenerateActionsRunnerToken(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
setting.MustInstalled()
scope := c.String("scope")
respText, extra := private.GenerateActionsRunnerToken(ctx, scope)
if extra.HasError() {
return handleCliResponseExtra(extra)
}
_, _ = fmt.Printf("%s\n", respText.Text)
return nil
}
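
Usage sketch (hedged): the command above would typically be invoked from the shell as below, assuming the binary is installed as forgejo (it may be named gitea depending on the build); the scope value is a placeholder.
# token for a runner bound to a single repository; leave --scope empty for a global runner
forgejo actions generate-runner-token --scope myorg/myrepo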

cmd/admin.go Normal file

@ -0,0 +1,168 @@
// Copyright 2016 The Gogs Authors. All rights reserved.
// Copyright 2016 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"context"
"fmt"
"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
"code.gitea.io/gitea/modules/git"
"code.gitea.io/gitea/modules/gitrepo"
"code.gitea.io/gitea/modules/log"
repo_module "code.gitea.io/gitea/modules/repository"
"github.com/urfave/cli/v2"
)
var (
// CmdAdmin represents the available admin sub-command.
CmdAdmin = &cli.Command{
Name: "admin",
Usage: "Perform common administrative operations",
Subcommands: []*cli.Command{
subcmdUser,
subcmdRepoSyncReleases,
subcmdRegenerate,
subcmdAuth,
subcmdSendMail,
},
}
subcmdRepoSyncReleases = &cli.Command{
Name: "repo-sync-releases",
Usage: "Synchronize repository releases with tags",
Action: runRepoSyncReleases,
}
subcmdRegenerate = &cli.Command{
Name: "regenerate",
Usage: "Regenerate specific files",
Subcommands: []*cli.Command{
microcmdRegenHooks,
microcmdRegenKeys,
},
}
subcmdAuth = &cli.Command{
Name: "auth",
Usage: "Modify external auth providers",
Subcommands: []*cli.Command{
microcmdAuthAddOauth,
microcmdAuthUpdateOauth,
microcmdAuthAddLdapBindDn,
microcmdAuthUpdateLdapBindDn,
microcmdAuthAddLdapSimpleAuth,
microcmdAuthUpdateLdapSimpleAuth,
microcmdAuthAddSMTP,
microcmdAuthUpdateSMTP,
microcmdAuthList,
microcmdAuthDelete,
},
}
subcmdSendMail = &cli.Command{
Name: "sendmail",
Usage: "Send a message to all users",
Action: runSendMail,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "title",
Usage: `a title of a message`,
Value: "",
},
&cli.StringFlag{
Name: "content",
Usage: "a content of a message",
Value: "",
},
&cli.BoolFlag{
Name: "force",
Aliases: []string{"f"},
Usage: "A flag to bypass a confirmation step",
},
},
}
idFlag = &cli.Int64Flag{
Name: "id",
Usage: "ID of authentication source",
}
)
func runRepoSyncReleases(_ *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
if err := git.InitSimple(ctx); err != nil {
return err
}
log.Trace("Synchronizing repository releases (this may take a while)")
for page := 1; ; page++ {
repos, count, err := repo_model.SearchRepositoryByName(ctx, &repo_model.SearchRepoOptions{
ListOptions: db.ListOptions{
PageSize: repo_model.RepositoryListDefaultPageSize,
Page: page,
},
Private: true,
})
if err != nil {
return fmt.Errorf("SearchRepositoryByName: %w", err)
}
if len(repos) == 0 {
break
}
log.Trace("Processing next %d repos of %d", len(repos), count)
for _, repo := range repos {
log.Trace("Synchronizing repo %s with path %s", repo.FullName(), repo.RepoPath())
gitRepo, err := gitrepo.OpenRepository(ctx, repo)
if err != nil {
log.Warn("OpenRepository: %v", err)
continue
}
oldnum, err := getReleaseCount(ctx, repo.ID)
if err != nil {
log.Warn(" GetReleaseCountByRepoID: %v", err)
}
log.Trace(" currentNumReleases is %d, running SyncReleasesWithTags", oldnum)
if err = repo_module.SyncReleasesWithTags(ctx, repo, gitRepo); err != nil {
log.Warn(" SyncReleasesWithTags: %v", err)
gitRepo.Close()
continue
}
count, err = getReleaseCount(ctx, repo.ID)
if err != nil {
log.Warn(" GetReleaseCountByRepoID: %v", err)
gitRepo.Close()
continue
}
log.Trace(" repo %s releases synchronized to tags: from %d to %d",
repo.FullName(), oldnum, count)
gitRepo.Close()
}
}
return nil
}
func getReleaseCount(ctx context.Context, id int64) (int64, error) {
return db.Count[repo_model.Release](
ctx,
repo_model.FindReleasesOptions{
RepoID: id,
IncludeTags: true,
},
)
}
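
A minimal usage sketch for the subcommand above, under the same binary-name assumption:
# re-create release records from git tags for every repository, including private ones
forgejo admin repo-sync-releases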

cmd/admin_auth.go Normal file

@ -0,0 +1,111 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
"os"
"text/tabwriter"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
auth_service "code.gitea.io/gitea/services/auth"
"github.com/urfave/cli/v2"
)
var (
microcmdAuthDelete = &cli.Command{
Name: "delete",
Usage: "Delete specific auth source",
Flags: []cli.Flag{idFlag},
Action: runDeleteAuth,
}
microcmdAuthList = &cli.Command{
Name: "list",
Usage: "List auth sources",
Action: runListAuth,
Flags: []cli.Flag{
&cli.IntFlag{
Name: "min-width",
Usage: "Minimal cell width including any padding for the formatted table",
Value: 0,
},
&cli.IntFlag{
Name: "tab-width",
Usage: "width of tab characters in formatted table (equivalent number of spaces)",
Value: 8,
},
&cli.IntFlag{
Name: "padding",
Usage: "padding added to a cell before computing its width",
Value: 1,
},
&cli.StringFlag{
Name: "pad-char",
Usage: `ASCII char used for padding. If padchar == '\\t', the Writer will assume that the width of a '\\t' in the formatted output is tabwidth, and cells are left-aligned independent of align_left (for correct-looking results, tabwidth must correspond to the tab width in the viewer displaying the result)`,
Value: "\t",
},
&cli.BoolFlag{
Name: "vertical-bars",
Usage: "Set to true to print vertical bars between columns",
},
},
}
)
func runListAuth(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
authSources, err := db.Find[auth_model.Source](ctx, auth_model.FindSourcesOptions{})
if err != nil {
return err
}
flags := tabwriter.AlignRight
if c.Bool("vertical-bars") {
flags |= tabwriter.Debug
}
padChar := byte('\t')
if len(c.String("pad-char")) > 0 {
padChar = c.String("pad-char")[0]
}
// loop through each source and print
w := tabwriter.NewWriter(os.Stdout, c.Int("min-width"), c.Int("tab-width"), c.Int("padding"), padChar, flags)
fmt.Fprintf(w, "ID\tName\tType\tEnabled\n")
for _, source := range authSources {
fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", source.ID, source.Name, source.Type.String(), source.IsActive)
}
w.Flush()
return nil
}
func runDeleteAuth(c *cli.Context) error {
if !c.IsSet("id") {
return errors.New("--id flag is missing")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
if err != nil {
return err
}
return auth_service.DeleteSource(ctx, source)
}
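
Hedged usage sketches for the list and delete subcommands defined above (the source ID is a placeholder taken from the list output):
# print all configured auth sources, with vertical bars between columns
forgejo admin auth list --vertical-bars
# delete the auth source with ID 3
forgejo admin auth delete --id 3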

cmd/admin_auth_ldap.go Normal file

@ -0,0 +1,409 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"context"
"fmt"
"strings"
"code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth/source/ldap"
"github.com/urfave/cli/v2"
)
type (
authService struct {
initDB func(ctx context.Context) error
createAuthSource func(context.Context, *auth.Source) error
updateAuthSource func(context.Context, *auth.Source) error
getAuthSourceByID func(ctx context.Context, id int64) (*auth.Source, error)
}
)
var (
commonLdapCLIFlags = []cli.Flag{
&cli.StringFlag{
Name: "name",
Usage: "Authentication name.",
},
&cli.BoolFlag{
Name: "not-active",
Usage: "Deactivate the authentication source.",
},
&cli.BoolFlag{
Name: "active",
Usage: "Activate the authentication source.",
},
&cli.StringFlag{
Name: "security-protocol",
Usage: "Security protocol name.",
},
&cli.BoolFlag{
Name: "skip-tls-verify",
Usage: "Disable TLS verification.",
},
&cli.StringFlag{
Name: "host",
Usage: "The address where the LDAP server can be reached.",
},
&cli.IntFlag{
Name: "port",
Usage: "The port to use when connecting to the LDAP server.",
},
&cli.StringFlag{
Name: "user-search-base",
Usage: "The LDAP base at which user accounts will be searched for.",
},
&cli.StringFlag{
Name: "user-filter",
Usage: "An LDAP filter declaring how to find the user record that is attempting to authenticate.",
},
&cli.StringFlag{
Name: "admin-filter",
Usage: "An LDAP filter specifying if a user should be given administrator privileges.",
},
&cli.StringFlag{
Name: "restricted-filter",
Usage: "An LDAP filter specifying if a user should be given restricted status.",
},
&cli.BoolFlag{
Name: "allow-deactivate-all",
Usage: "Allow empty search results to deactivate all users.",
},
&cli.StringFlag{
Name: "username-attribute",
Usage: "The attribute of the users LDAP record containing the user name.",
},
&cli.StringFlag{
Name: "firstname-attribute",
Usage: "The attribute of the users LDAP record containing the users first name.",
},
&cli.StringFlag{
Name: "surname-attribute",
Usage: "The attribute of the users LDAP record containing the users surname.",
},
&cli.StringFlag{
Name: "email-attribute",
Usage: "The attribute of the users LDAP record containing the users email address.",
},
&cli.StringFlag{
Name: "public-ssh-key-attribute",
Usage: "The attribute of the users LDAP record containing the users public ssh key.",
},
&cli.BoolFlag{
Name: "skip-local-2fa",
Usage: "Set to true to skip local 2fa for users authenticated by this source",
},
&cli.StringFlag{
Name: "avatar-attribute",
Usage: "The attribute of the users LDAP record containing the users avatar.",
},
}
ldapBindDnCLIFlags = append(commonLdapCLIFlags,
&cli.StringFlag{
Name: "bind-dn",
Usage: "The DN to bind to the LDAP server with when searching for the user.",
},
&cli.StringFlag{
Name: "bind-password",
Usage: "The password for the Bind DN, if any.",
},
&cli.BoolFlag{
Name: "attributes-in-bind",
Usage: "Fetch attributes in bind DN context.",
},
&cli.BoolFlag{
Name: "synchronize-users",
Usage: "Enable user synchronization.",
},
&cli.BoolFlag{
Name: "disable-synchronize-users",
Usage: "Disable user synchronization.",
},
&cli.UintFlag{
Name: "page-size",
Usage: "Search page size.",
})
ldapSimpleAuthCLIFlags = append(commonLdapCLIFlags,
&cli.StringFlag{
Name: "user-dn",
Usage: "The user's DN.",
})
microcmdAuthAddLdapBindDn = &cli.Command{
Name: "add-ldap",
Usage: "Add new LDAP (via Bind DN) authentication source",
Action: func(c *cli.Context) error {
return newAuthService().addLdapBindDn(c)
},
Flags: ldapBindDnCLIFlags,
}
microcmdAuthUpdateLdapBindDn = &cli.Command{
Name: "update-ldap",
Usage: "Update existing LDAP (via Bind DN) authentication source",
Action: func(c *cli.Context) error {
return newAuthService().updateLdapBindDn(c)
},
Flags: append([]cli.Flag{idFlag}, ldapBindDnCLIFlags...),
}
microcmdAuthAddLdapSimpleAuth = &cli.Command{
Name: "add-ldap-simple",
Usage: "Add new LDAP (simple auth) authentication source",
Action: func(c *cli.Context) error {
return newAuthService().addLdapSimpleAuth(c)
},
Flags: ldapSimpleAuthCLIFlags,
}
microcmdAuthUpdateLdapSimpleAuth = &cli.Command{
Name: "update-ldap-simple",
Usage: "Update existing LDAP (simple auth) authentication source",
Action: func(c *cli.Context) error {
return newAuthService().updateLdapSimpleAuth(c)
},
Flags: append([]cli.Flag{idFlag}, ldapSimpleAuthCLIFlags...),
}
)
// newAuthService creates a service with default functions.
func newAuthService() *authService {
return &authService{
initDB: initDB,
createAuthSource: auth.CreateSource,
updateAuthSource: auth.UpdateSource,
getAuthSourceByID: auth.GetSourceByID,
}
}
// parseAuthSource assigns values on authSource according to command line flags.
func parseAuthSource(c *cli.Context, authSource *auth.Source) {
if c.IsSet("name") {
authSource.Name = c.String("name")
}
if c.IsSet("not-active") {
authSource.IsActive = !c.Bool("not-active")
}
if c.IsSet("active") {
authSource.IsActive = c.Bool("active")
}
if c.IsSet("synchronize-users") {
authSource.IsSyncEnabled = c.Bool("synchronize-users")
}
if c.IsSet("disable-synchronize-users") {
authSource.IsSyncEnabled = !c.Bool("disable-synchronize-users")
}
}
// parseLdapConfig assigns values on config according to command line flags.
func parseLdapConfig(c *cli.Context, config *ldap.Source) error {
if c.IsSet("name") {
config.Name = c.String("name")
}
if c.IsSet("host") {
config.Host = c.String("host")
}
if c.IsSet("port") {
config.Port = c.Int("port")
}
if c.IsSet("security-protocol") {
p, ok := findLdapSecurityProtocolByName(c.String("security-protocol"))
if !ok {
return fmt.Errorf("Unknown security protocol name: %s", c.String("security-protocol"))
}
config.SecurityProtocol = p
}
if c.IsSet("skip-tls-verify") {
config.SkipVerify = c.Bool("skip-tls-verify")
}
if c.IsSet("bind-dn") {
config.BindDN = c.String("bind-dn")
}
if c.IsSet("user-dn") {
config.UserDN = c.String("user-dn")
}
if c.IsSet("bind-password") {
config.BindPassword = c.String("bind-password")
}
if c.IsSet("user-search-base") {
config.UserBase = c.String("user-search-base")
}
if c.IsSet("username-attribute") {
config.AttributeUsername = c.String("username-attribute")
}
if c.IsSet("firstname-attribute") {
config.AttributeName = c.String("firstname-attribute")
}
if c.IsSet("surname-attribute") {
config.AttributeSurname = c.String("surname-attribute")
}
if c.IsSet("email-attribute") {
config.AttributeMail = c.String("email-attribute")
}
if c.IsSet("attributes-in-bind") {
config.AttributesInBind = c.Bool("attributes-in-bind")
}
if c.IsSet("public-ssh-key-attribute") {
config.AttributeSSHPublicKey = c.String("public-ssh-key-attribute")
}
if c.IsSet("avatar-attribute") {
config.AttributeAvatar = c.String("avatar-attribute")
}
if c.IsSet("page-size") {
config.SearchPageSize = uint32(c.Uint("page-size"))
}
if c.IsSet("user-filter") {
config.Filter = c.String("user-filter")
}
if c.IsSet("admin-filter") {
config.AdminFilter = c.String("admin-filter")
}
if c.IsSet("restricted-filter") {
config.RestrictedFilter = c.String("restricted-filter")
}
if c.IsSet("allow-deactivate-all") {
config.AllowDeactivateAll = c.Bool("allow-deactivate-all")
}
if c.IsSet("skip-local-2fa") {
config.SkipLocalTwoFA = c.Bool("skip-local-2fa")
}
return nil
}
// findLdapSecurityProtocolByName finds security protocol by its name ignoring case.
// It returns the value of the security protocol and if it was found.
func findLdapSecurityProtocolByName(name string) (ldap.SecurityProtocol, bool) {
for i, n := range ldap.SecurityProtocolNames {
if strings.EqualFold(name, n) {
return i, true
}
}
return 0, false
}
// getAuthSource gets the login source by its id defined in the command line flags.
// It returns an error if the id is not set, does not match any source or if the source is not of expected type.
func (a *authService) getAuthSource(ctx context.Context, c *cli.Context, authType auth.Type) (*auth.Source, error) {
if err := argsSet(c, "id"); err != nil {
return nil, err
}
authSource, err := a.getAuthSourceByID(ctx, c.Int64("id"))
if err != nil {
return nil, err
}
if authSource.Type != authType {
return nil, fmt.Errorf("Invalid authentication type. expected: %s, actual: %s", authType.String(), authSource.Type.String())
}
return authSource, nil
}
// addLdapBindDn adds a new LDAP via Bind DN authentication source.
func (a *authService) addLdapBindDn(c *cli.Context) error {
if err := argsSet(c, "name", "security-protocol", "host", "port", "user-search-base", "user-filter", "email-attribute"); err != nil {
return err
}
ctx, cancel := installSignals()
defer cancel()
if err := a.initDB(ctx); err != nil {
return err
}
authSource := &auth.Source{
Type: auth.LDAP,
IsActive: true, // active by default
Cfg: &ldap.Source{
Enabled: true, // always true
},
}
parseAuthSource(c, authSource)
if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
return err
}
return a.createAuthSource(ctx, authSource)
}
// updateLdapBindDn updates an existing LDAP (via Bind DN) authentication source.
func (a *authService) updateLdapBindDn(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := a.initDB(ctx); err != nil {
return err
}
authSource, err := a.getAuthSource(ctx, c, auth.LDAP)
if err != nil {
return err
}
parseAuthSource(c, authSource)
if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
return err
}
return a.updateAuthSource(ctx, authSource)
}
// addLdapSimpleAuth adds a new LDAP (simple auth) authentication source.
func (a *authService) addLdapSimpleAuth(c *cli.Context) error {
if err := argsSet(c, "name", "security-protocol", "host", "port", "user-dn", "user-filter", "email-attribute"); err != nil {
return err
}
ctx, cancel := installSignals()
defer cancel()
if err := a.initDB(ctx); err != nil {
return err
}
authSource := &auth.Source{
Type: auth.DLDAP,
IsActive: true, // active by default
Cfg: &ldap.Source{
Enabled: true, // always true
},
}
parseAuthSource(c, authSource)
if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
return err
}
return a.createAuthSource(ctx, authSource)
}
// updateLdapSimpleAuth updates an existing LDAP (simple auth) authentication source.
func (a *authService) updateLdapSimpleAuth(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := a.initDB(ctx); err != nil {
return err
}
authSource, err := a.getAuthSource(ctx, c, auth.DLDAP)
if err != nil {
return err
}
parseAuthSource(c, authSource)
if err := parseLdapConfig(c, authSource.Cfg.(*ldap.Source)); err != nil {
return err
}
return a.updateAuthSource(ctx, authSource)
}
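
A rough example of adding an LDAP (Bind DN) source with exactly the flags addLdapBindDn requires; every value below is a placeholder, and the security-protocol name is assumed to match one of ldap.SecurityProtocolNames case-insensitively:
forgejo admin auth add-ldap \
  --name corp-ldap \
  --security-protocol unencrypted \
  --host ldap.example.com \
  --port 389 \
  --user-search-base "ou=people,dc=example,dc=com" \
  --user-filter "(&(objectClass=posixAccount)(uid=%s))" \
  --email-attribute mail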

cmd/admin_auth_ldap_test.go Normal file
File diff suppressed because it is too large

cmd/admin_auth_oauth.go Normal file

@ -0,0 +1,299 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
"net/url"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/services/auth/source/oauth2"
"github.com/urfave/cli/v2"
)
var (
oauthCLIFlags = []cli.Flag{
&cli.StringFlag{
Name: "name",
Value: "",
Usage: "Application Name",
},
&cli.StringFlag{
Name: "provider",
Value: "",
Usage: "OAuth2 Provider",
},
&cli.StringFlag{
Name: "key",
Value: "",
Usage: "Client ID (Key)",
},
&cli.StringFlag{
Name: "secret",
Value: "",
Usage: "Client Secret",
},
&cli.StringFlag{
Name: "auto-discover-url",
Value: "",
Usage: "OpenID Connect Auto Discovery URL (only required when using OpenID Connect as provider)",
},
&cli.StringFlag{
Name: "use-custom-urls",
Value: "false",
Usage: "Use custom URLs for GitLab/GitHub OAuth endpoints",
},
&cli.StringFlag{
Name: "custom-tenant-id",
Value: "",
Usage: "Use custom Tenant ID for OAuth endpoints",
},
&cli.StringFlag{
Name: "custom-auth-url",
Value: "",
Usage: "Use a custom Authorization URL (option for GitLab/GitHub)",
},
&cli.StringFlag{
Name: "custom-token-url",
Value: "",
Usage: "Use a custom Token URL (option for GitLab/GitHub)",
},
&cli.StringFlag{
Name: "custom-profile-url",
Value: "",
Usage: "Use a custom Profile URL (option for GitLab/GitHub)",
},
&cli.StringFlag{
Name: "custom-email-url",
Value: "",
Usage: "Use a custom Email URL (option for GitHub)",
},
&cli.StringFlag{
Name: "icon-url",
Value: "",
Usage: "Custom icon URL for OAuth2 login source",
},
&cli.BoolFlag{
Name: "skip-local-2fa",
Usage: "Set to true to skip local 2fa for users authenticated by this source",
},
&cli.StringSliceFlag{
Name: "scopes",
Value: nil,
Usage: "Scopes to request when to authenticate against this OAuth2 source",
},
&cli.StringFlag{
Name: "required-claim-name",
Value: "",
Usage: "Claim name that has to be set to allow users to login with this source",
},
&cli.StringFlag{
Name: "required-claim-value",
Value: "",
Usage: "Claim value that has to be set to allow users to login with this source",
},
&cli.StringFlag{
Name: "group-claim-name",
Value: "",
Usage: "Claim name providing group names for this source",
},
&cli.StringFlag{
Name: "admin-group",
Value: "",
Usage: "Group Claim value for administrator users",
},
&cli.StringFlag{
Name: "restricted-group",
Value: "",
Usage: "Group Claim value for restricted users",
},
&cli.StringFlag{
Name: "group-team-map",
Value: "",
Usage: "JSON mapping between groups and org teams",
},
&cli.BoolFlag{
Name: "group-team-map-removal",
Usage: "Activate automatic team membership removal depending on groups",
},
}
microcmdAuthAddOauth = &cli.Command{
Name: "add-oauth",
Usage: "Add new Oauth authentication source",
Action: runAddOauth,
Flags: oauthCLIFlags,
}
microcmdAuthUpdateOauth = &cli.Command{
Name: "update-oauth",
Usage: "Update existing Oauth authentication source",
Action: runUpdateOauth,
Flags: append(oauthCLIFlags[:1], append([]cli.Flag{idFlag}, oauthCLIFlags[1:]...)...),
}
)
func parseOAuth2Config(c *cli.Context) *oauth2.Source {
var customURLMapping *oauth2.CustomURLMapping
if c.IsSet("use-custom-urls") {
customURLMapping = &oauth2.CustomURLMapping{
TokenURL: c.String("custom-token-url"),
AuthURL: c.String("custom-auth-url"),
ProfileURL: c.String("custom-profile-url"),
EmailURL: c.String("custom-email-url"),
Tenant: c.String("custom-tenant-id"),
}
} else {
customURLMapping = nil
}
return &oauth2.Source{
Provider: c.String("provider"),
ClientID: c.String("key"),
ClientSecret: c.String("secret"),
OpenIDConnectAutoDiscoveryURL: c.String("auto-discover-url"),
CustomURLMapping: customURLMapping,
IconURL: c.String("icon-url"),
SkipLocalTwoFA: c.Bool("skip-local-2fa"),
Scopes: c.StringSlice("scopes"),
RequiredClaimName: c.String("required-claim-name"),
RequiredClaimValue: c.String("required-claim-value"),
GroupClaimName: c.String("group-claim-name"),
AdminGroup: c.String("admin-group"),
RestrictedGroup: c.String("restricted-group"),
GroupTeamMap: c.String("group-team-map"),
GroupTeamMapRemoval: c.Bool("group-team-map-removal"),
}
}
func runAddOauth(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
config := parseOAuth2Config(c)
if config.Provider == "openidConnect" {
discoveryURL, err := url.Parse(config.OpenIDConnectAutoDiscoveryURL)
if err != nil || (discoveryURL.Scheme != "http" && discoveryURL.Scheme != "https") {
return fmt.Errorf("invalid Auto Discovery URL: %s (this must be a valid URL starting with http:// or https://)", config.OpenIDConnectAutoDiscoveryURL)
}
}
return auth_model.CreateSource(ctx, &auth_model.Source{
Type: auth_model.OAuth2,
Name: c.String("name"),
IsActive: true,
Cfg: config,
})
}
func runUpdateOauth(c *cli.Context) error {
if !c.IsSet("id") {
return errors.New("--id flag is missing")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
if err != nil {
return err
}
oAuth2Config := source.Cfg.(*oauth2.Source)
if c.IsSet("name") {
source.Name = c.String("name")
}
if c.IsSet("provider") {
oAuth2Config.Provider = c.String("provider")
}
if c.IsSet("key") {
oAuth2Config.ClientID = c.String("key")
}
if c.IsSet("secret") {
oAuth2Config.ClientSecret = c.String("secret")
}
if c.IsSet("auto-discover-url") {
oAuth2Config.OpenIDConnectAutoDiscoveryURL = c.String("auto-discover-url")
}
if c.IsSet("icon-url") {
oAuth2Config.IconURL = c.String("icon-url")
}
if c.IsSet("scopes") {
oAuth2Config.Scopes = c.StringSlice("scopes")
}
if c.IsSet("required-claim-name") {
oAuth2Config.RequiredClaimName = c.String("required-claim-name")
}
if c.IsSet("required-claim-value") {
oAuth2Config.RequiredClaimValue = c.String("required-claim-value")
}
if c.IsSet("group-claim-name") {
oAuth2Config.GroupClaimName = c.String("group-claim-name")
}
if c.IsSet("admin-group") {
oAuth2Config.AdminGroup = c.String("admin-group")
}
if c.IsSet("restricted-group") {
oAuth2Config.RestrictedGroup = c.String("restricted-group")
}
if c.IsSet("group-team-map") {
oAuth2Config.GroupTeamMap = c.String("group-team-map")
}
if c.IsSet("group-team-map-removal") {
oAuth2Config.GroupTeamMapRemoval = c.Bool("group-team-map-removal")
}
// update custom URL mapping
customURLMapping := &oauth2.CustomURLMapping{}
if oAuth2Config.CustomURLMapping != nil {
customURLMapping.TokenURL = oAuth2Config.CustomURLMapping.TokenURL
customURLMapping.AuthURL = oAuth2Config.CustomURLMapping.AuthURL
customURLMapping.ProfileURL = oAuth2Config.CustomURLMapping.ProfileURL
customURLMapping.EmailURL = oAuth2Config.CustomURLMapping.EmailURL
customURLMapping.Tenant = oAuth2Config.CustomURLMapping.Tenant
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-token-url") {
customURLMapping.TokenURL = c.String("custom-token-url")
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-auth-url") {
customURLMapping.AuthURL = c.String("custom-auth-url")
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-profile-url") {
customURLMapping.ProfileURL = c.String("custom-profile-url")
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-email-url") {
customURLMapping.EmailURL = c.String("custom-email-url")
}
if c.IsSet("use-custom-urls") && c.IsSet("custom-tenant-id") {
customURLMapping.Tenant = c.String("custom-tenant-id")
}
oAuth2Config.CustomURLMapping = customURLMapping
source.Cfg = oAuth2Config
return auth_model.UpdateSource(ctx, source)
}
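
A hedged example of registering an OpenID Connect source through add-oauth (client ID, secret and discovery URL are placeholders; "openidConnect" is the provider name checked in runAddOauth above):
forgejo admin auth add-oauth \
  --name keycloak \
  --provider openidConnect \
  --key forgejo-client \
  --secret s3cr3t \
  --auto-discover-url https://idp.example.com/realms/main/.well-known/openid-configuration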

cmd/admin_auth_stmp.go Normal file

@ -0,0 +1,200 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"strings"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/services/auth/source/smtp"
"github.com/urfave/cli/v2"
)
var (
smtpCLIFlags = []cli.Flag{
&cli.StringFlag{
Name: "name",
Value: "",
Usage: "Application Name",
},
&cli.StringFlag{
Name: "auth-type",
Value: "PLAIN",
Usage: "SMTP Authentication Type (PLAIN/LOGIN/CRAM-MD5) default PLAIN",
},
&cli.StringFlag{
Name: "host",
Value: "",
Usage: "SMTP Host",
},
&cli.IntFlag{
Name: "port",
Usage: "SMTP Port",
},
&cli.BoolFlag{
Name: "force-smtps",
Usage: "SMTPS is always used on port 465. Set this to force SMTPS on other ports.",
Value: true,
},
&cli.BoolFlag{
Name: "skip-verify",
Usage: "Skip TLS verify.",
Value: true,
},
&cli.StringFlag{
Name: "helo-hostname",
Value: "",
Usage: "Hostname sent with HELO. Leave blank to send current hostname",
},
&cli.BoolFlag{
Name: "disable-helo",
Usage: "Disable SMTP helo.",
Value: true,
},
&cli.StringFlag{
Name: "allowed-domains",
Value: "",
Usage: "Leave empty to allow all domains. Separate multiple domains with a comma (',')",
},
&cli.BoolFlag{
Name: "skip-local-2fa",
Usage: "Skip 2FA to log on.",
Value: true,
},
&cli.BoolFlag{
Name: "active",
Usage: "This Authentication Source is Activated.",
Value: true,
},
}
microcmdAuthAddSMTP = &cli.Command{
Name: "add-smtp",
Usage: "Add new SMTP authentication source",
Action: runAddSMTP,
Flags: smtpCLIFlags,
}
microcmdAuthUpdateSMTP = &cli.Command{
Name: "update-smtp",
Usage: "Update existing SMTP authentication source",
Action: runUpdateSMTP,
Flags: append(smtpCLIFlags[:1], append([]cli.Flag{idFlag}, smtpCLIFlags[1:]...)...),
}
)
func parseSMTPConfig(c *cli.Context, conf *smtp.Source) error {
if c.IsSet("auth-type") {
validAuthTypes := []string{"PLAIN", "LOGIN", "CRAM-MD5"}
if !util.SliceContainsString(validAuthTypes, strings.ToUpper(c.String("auth-type"))) {
return errors.New("Auth must be one of PLAIN/LOGIN/CRAM-MD5")
}
conf.Auth = c.String("auth-type")
}
if c.IsSet("host") {
conf.Host = c.String("host")
}
if c.IsSet("port") {
conf.Port = c.Int("port")
}
if c.IsSet("allowed-domains") {
conf.AllowedDomains = c.String("allowed-domains")
}
if c.IsSet("force-smtps") {
conf.ForceSMTPS = c.Bool("force-smtps")
}
if c.IsSet("skip-verify") {
conf.SkipVerify = c.Bool("skip-verify")
}
if c.IsSet("helo-hostname") {
conf.HeloHostname = c.String("helo-hostname")
}
if c.IsSet("disable-helo") {
conf.DisableHelo = c.Bool("disable-helo")
}
if c.IsSet("skip-local-2fa") {
conf.SkipLocalTwoFA = c.Bool("skip-local-2fa")
}
return nil
}
func runAddSMTP(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
if !c.IsSet("name") || len(c.String("name")) == 0 {
return errors.New("name must be set")
}
if !c.IsSet("host") || len(c.String("host")) == 0 {
return errors.New("host must be set")
}
if !c.IsSet("port") {
return errors.New("port must be set")
}
active := true
if c.IsSet("active") {
active = c.Bool("active")
}
var smtpConfig smtp.Source
if err := parseSMTPConfig(c, &smtpConfig); err != nil {
return err
}
// If not set default to PLAIN
if len(smtpConfig.Auth) == 0 {
smtpConfig.Auth = "PLAIN"
}
return auth_model.CreateSource(ctx, &auth_model.Source{
Type: auth_model.SMTP,
Name: c.String("name"),
IsActive: active,
Cfg: &smtpConfig,
})
}
func runUpdateSMTP(c *cli.Context) error {
if !c.IsSet("id") {
return errors.New("--id flag is missing")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
source, err := auth_model.GetSourceByID(ctx, c.Int64("id"))
if err != nil {
return err
}
smtpConfig := source.Cfg.(*smtp.Source)
if err := parseSMTPConfig(c, smtpConfig); err != nil {
return err
}
if c.IsSet("name") {
source.Name = c.String("name")
}
if c.IsSet("active") {
source.IsActive = c.Bool("active")
}
source.Cfg = smtpConfig
return auth_model.UpdateSource(ctx, source)
}
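
A hedged example for the add-smtp subcommand above (name, host and port are placeholders; the auth type falls back to PLAIN when not given):
forgejo admin auth add-smtp --name company-smtp --host mail.example.com --port 587 --auth-type LOGIN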

cmd/admin_regenerate.go Normal file

@ -0,0 +1,46 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
asymkey_model "code.gitea.io/gitea/models/asymkey"
"code.gitea.io/gitea/modules/graceful"
repo_service "code.gitea.io/gitea/services/repository"
"github.com/urfave/cli/v2"
)
var (
microcmdRegenHooks = &cli.Command{
Name: "hooks",
Usage: "Regenerate git-hooks",
Action: runRegenerateHooks,
}
microcmdRegenKeys = &cli.Command{
Name: "keys",
Usage: "Regenerate authorized_keys file",
Action: runRegenerateKeys,
}
)
func runRegenerateHooks(_ *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
return repo_service.SyncRepositoryHooks(graceful.GetManager().ShutdownContext())
}
func runRegenerateKeys(_ *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
return asymkey_model.RewriteAllPublicKeys(ctx)
}
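
Usage sketches for the regenerate subcommands above (hedged, same binary-name assumption):
# rewrite the git hooks of every repository
forgejo admin regenerate hooks
# rebuild the authorized_keys file from the public keys stored in the database
forgejo admin regenerate keys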

cmd/admin_user.go Normal file

@ -0,0 +1,21 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"github.com/urfave/cli/v2"
)
var subcmdUser = &cli.Command{
Name: "user",
Usage: "Modify users",
Subcommands: []*cli.Command{
microcmdUserCreate,
microcmdUserList,
microcmdUserChangePassword,
microcmdUserDelete,
microcmdUserGenerateAccessToken,
microcmdUserMustChangePassword,
},
}

cmd/admin_user_change_password.go Normal file

@ -0,0 +1,80 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/auth/password"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
user_service "code.gitea.io/gitea/services/user"
"github.com/urfave/cli/v2"
)
var microcmdUserChangePassword = &cli.Command{
Name: "change-password",
Usage: "Change a user's password",
Action: runChangePassword,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "username",
Aliases: []string{"u"},
Value: "",
Usage: "The user to change password for",
},
&cli.StringFlag{
Name: "password",
Aliases: []string{"p"},
Value: "",
Usage: "New password to set for user",
},
&cli.BoolFlag{
Name: "must-change-password",
Usage: "User must change password",
Value: true,
},
},
}
func runChangePassword(c *cli.Context) error {
if err := argsSet(c, "username", "password"); err != nil {
return err
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
user, err := user_model.GetUserByName(ctx, c.String("username"))
if err != nil {
return err
}
opts := &user_service.UpdateAuthOptions{
Password: optional.Some(c.String("password")),
MustChangePassword: optional.Some(c.Bool("must-change-password")),
}
if err := user_service.UpdateAuth(ctx, user, opts); err != nil {
switch {
case errors.Is(err, password.ErrMinLength):
return fmt.Errorf("password is not long enough, needs to be at least %d characters", setting.MinPasswordLength)
case errors.Is(err, password.ErrComplexity):
return errors.New("password does not meet complexity requirements")
case errors.Is(err, password.ErrIsPwned):
return errors.New("the password is in a list of stolen passwords previously exposed in public data breaches, please try again with a different password, to see more details: https://haveibeenpwned.com/Passwords")
default:
return err
}
}
fmt.Printf("%s's password has been successfully updated!\n", user.Name)
return nil
}
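
A hedged usage sketch for change-password (username and password are placeholders; --must-change-password defaults to true, so it is disabled explicitly here):
forgejo admin user change-password --username jdoe --password 'correct horse battery staple' --must-change-password=false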

cmd/admin_user_create.go Normal file

@ -0,0 +1,175 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
auth_model "code.gitea.io/gitea/models/auth"
"code.gitea.io/gitea/models/db"
user_model "code.gitea.io/gitea/models/user"
pwd "code.gitea.io/gitea/modules/auth/password"
"code.gitea.io/gitea/modules/optional"
"code.gitea.io/gitea/modules/setting"
"github.com/urfave/cli/v2"
)
var microcmdUserCreate = &cli.Command{
Name: "create",
Usage: "Create a new user in database",
Action: runCreateUser,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "name",
Usage: "Username. DEPRECATED: use username instead",
},
&cli.StringFlag{
Name: "username",
Usage: "Username",
},
&cli.StringFlag{
Name: "password",
Usage: "User password",
},
&cli.StringFlag{
Name: "email",
Usage: "User email address",
},
&cli.BoolFlag{
Name: "admin",
Usage: "User is an admin",
},
&cli.BoolFlag{
Name: "random-password",
Usage: "Generate a random password for the user",
},
&cli.BoolFlag{
Name: "must-change-password",
Usage: "Set this option to false to prevent forcing the user to change their password after initial login",
Value: true,
DisableDefaultText: true,
},
&cli.IntFlag{
Name: "random-password-length",
Usage: "Length of the random password to be generated",
Value: 12,
},
&cli.BoolFlag{
Name: "access-token",
Usage: "Generate access token for the user",
},
&cli.BoolFlag{
Name: "restricted",
Usage: "Make a restricted user account",
},
},
}
func runCreateUser(c *cli.Context) error {
if err := argsSet(c, "email"); err != nil {
return err
}
if c.IsSet("name") && c.IsSet("username") {
return errors.New("cannot set both --name and --username flags")
}
if !c.IsSet("name") && !c.IsSet("username") {
return errors.New("one of --name or --username flags must be set")
}
if c.IsSet("password") && c.IsSet("random-password") {
return errors.New("cannot set both -random-password and -password flags")
}
var username string
if c.IsSet("username") {
username = c.String("username")
} else {
username = c.String("name")
_, _ = fmt.Fprintf(c.App.ErrWriter, "--name flag is deprecated. Use --username instead.\n")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
var password string
if c.IsSet("password") {
password = c.String("password")
} else if c.IsSet("random-password") {
var err error
password, err = pwd.Generate(c.Int("random-password-length"))
if err != nil {
return err
}
fmt.Printf("generated random password is '%s'\n", password)
} else {
return errors.New("must set either password or random-password flag")
}
isAdmin := c.Bool("admin")
mustChangePassword := true // always default to true
if c.IsSet("must-change-password") {
// if the flag is set, use the value provided by the user
mustChangePassword = c.Bool("must-change-password")
} else {
// check whether there are users in the database
hasUserRecord, err := db.IsTableNotEmpty(&user_model.User{})
if err != nil {
return fmt.Errorf("IsTableNotEmpty: %w", err)
}
if !hasUserRecord {
// if this is the first admin being created, don't force to change password (keep the old behavior)
mustChangePassword = false
}
}
restricted := optional.None[bool]()
if c.IsSet("restricted") {
restricted = optional.Some(c.Bool("restricted"))
}
// default user visibility in app.ini
visibility := setting.Service.DefaultUserVisibilityMode
u := &user_model.User{
Name: username,
Email: c.String("email"),
Passwd: password,
IsAdmin: isAdmin,
MustChangePassword: mustChangePassword,
Visibility: visibility,
}
overwriteDefault := &user_model.CreateUserOverwriteOptions{
IsActive: optional.Some(true),
IsRestricted: restricted,
}
if err := user_model.CreateUser(ctx, u, overwriteDefault); err != nil {
return fmt.Errorf("CreateUser: %w", err)
}
if c.Bool("access-token") {
t := &auth_model.AccessToken{
Name: "gitea-admin",
UID: u.ID,
}
if err := auth_model.NewAccessToken(ctx, t); err != nil {
return err
}
fmt.Printf("Access token was successfully created... %s\n", t.Token)
}
fmt.Printf("New user '%s' has been successfully created!\n", username)
return nil
}
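
A hedged example of creating the first administrator with the create subcommand above (username and email are placeholders; as the code notes, the first account created is not forced to change its generated password):
forgejo admin user create --username admin --email admin@example.com --random-password --admin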

cmd/admin_user_delete.go Normal file

@ -0,0 +1,81 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
"strings"
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/storage"
user_service "code.gitea.io/gitea/services/user"
"github.com/urfave/cli/v2"
)
var microcmdUserDelete = &cli.Command{
Name: "delete",
Usage: "Delete specific user by id, name or email",
Flags: []cli.Flag{
&cli.Int64Flag{
Name: "id",
Usage: "ID of user of the user to delete",
},
&cli.StringFlag{
Name: "username",
Aliases: []string{"u"},
Usage: "Username of the user to delete",
},
&cli.StringFlag{
Name: "email",
Aliases: []string{"e"},
Usage: "Email of the user to delete",
},
&cli.BoolFlag{
Name: "purge",
Usage: "Purge user, all their repositories, organizations and comments",
},
},
Action: runDeleteUser,
}
func runDeleteUser(c *cli.Context) error {
if !c.IsSet("id") && !c.IsSet("username") && !c.IsSet("email") {
return errors.New("You must provide the id, username or email of a user to delete")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
if err := storage.Init(); err != nil {
return err
}
var err error
var user *user_model.User
if c.IsSet("email") {
user, err = user_model.GetUserByEmail(ctx, c.String("email"))
} else if c.IsSet("username") {
user, err = user_model.GetUserByName(ctx, c.String("username"))
} else {
user, err = user_model.GetUserByID(ctx, c.Int64("id"))
}
if err != nil {
return err
}
if c.IsSet("username") && user.LowerName != strings.ToLower(strings.TrimSpace(c.String("username"))) {
return fmt.Errorf("The user %s who has email %s does not match the provided username %s", user.Name, c.String("email"), c.String("username"))
}
if c.IsSet("id") && user.ID != c.Int64("id") {
return fmt.Errorf("The user %s does not match the provided id %d", user.Name, c.Int64("id"))
}
return user_service.DeleteUser(ctx, user, c.Bool("purge"))
}
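
A hedged usage sketch for the delete subcommand above (the username is a placeholder; --purge also removes the user's repositories, organizations and comments):
forgejo admin user delete --username jdoe --purge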

cmd/admin_user_generate_access_token.go Normal file

@ -0,0 +1,94 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
auth_model "code.gitea.io/gitea/models/auth"
user_model "code.gitea.io/gitea/models/user"
"github.com/urfave/cli/v2"
)
var microcmdUserGenerateAccessToken = &cli.Command{
Name: "generate-access-token",
Usage: "Generate an access token for a specific user",
Flags: []cli.Flag{
&cli.StringFlag{
Name: "username",
Aliases: []string{"u"},
Usage: "Username",
},
&cli.StringFlag{
Name: "token-name",
Aliases: []string{"t"},
Usage: "Token name",
Value: "gitea-admin",
},
&cli.BoolFlag{
Name: "raw",
Usage: "Display only the token value",
},
&cli.StringFlag{
Name: "scopes",
Value: "",
Usage: "Comma separated list of scopes to apply to access token",
},
},
Action: runGenerateAccessToken,
}
func runGenerateAccessToken(c *cli.Context) error {
if !c.IsSet("username") {
return errors.New("You must provide a username to generate a token for")
}
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
user, err := user_model.GetUserByName(ctx, c.String("username"))
if err != nil {
return err
}
// construct token with name and user so we can make sure it is unique
t := &auth_model.AccessToken{
Name: c.String("token-name"),
UID: user.ID,
}
exist, err := auth_model.AccessTokenByNameExists(ctx, t)
if err != nil {
return err
}
if exist {
return errors.New("access token name has been used already")
}
// make sure the scopes are valid
accessTokenScope, err := auth_model.AccessTokenScope(c.String("scopes")).Normalize()
if err != nil {
return fmt.Errorf("invalid access token scope provided: %w", err)
}
t.Scope = accessTokenScope
// create the token
if err := auth_model.NewAccessToken(ctx, t); err != nil {
return err
}
if c.Bool("raw") {
fmt.Printf("%s\n", t.Token)
} else {
fmt.Printf("Access token was successfully created: %s\n", t.Token)
}
return nil
}
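
A hedged usage sketch for generate-access-token (username and token name are placeholders; the scope string must normalize to valid access-token scopes, "all" being assumed valid here):
forgejo admin user generate-access-token --username jdoe --token-name ci-token --scopes all --raw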

cmd/admin_user_list.go Normal file

@ -0,0 +1,60 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"fmt"
"os"
"text/tabwriter"
user_model "code.gitea.io/gitea/models/user"
"github.com/urfave/cli/v2"
)
var microcmdUserList = &cli.Command{
Name: "list",
Usage: "List users",
Action: runListUsers,
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "admin",
Usage: "List only admin users",
},
},
}
func runListUsers(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if err := initDB(ctx); err != nil {
return err
}
users, err := user_model.GetAllUsers(ctx)
if err != nil {
return err
}
w := tabwriter.NewWriter(os.Stdout, 5, 0, 1, ' ', 0)
if c.IsSet("admin") {
fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\n")
for _, u := range users {
if u.IsAdmin {
fmt.Fprintf(w, "%d\t%s\t%s\t%t\n", u.ID, u.Name, u.Email, u.IsActive)
}
}
} else {
twofa := user_model.UserList(users).GetTwoFaStatus(ctx)
fmt.Fprintf(w, "ID\tUsername\tEmail\tIsActive\tIsAdmin\t2FA\n")
for _, u := range users {
fmt.Fprintf(w, "%d\t%s\t%s\t%t\t%t\t%t\n", u.ID, u.Name, u.Email, u.IsActive, u.IsAdmin, twofa[u.ID])
}
}
w.Flush()
return nil
}
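
Usage sketches for the list subcommand above (hedged):
# list every account together with its 2FA status
forgejo admin user list
# restrict the table to administrators
forgejo admin user list --admin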

cmd/admin_user_must_change_password.go Normal file

@ -0,0 +1,60 @@
// Copyright 2023 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"errors"
"fmt"
user_model "code.gitea.io/gitea/models/user"
"github.com/urfave/cli/v2"
)
var microcmdUserMustChangePassword = &cli.Command{
Name: "must-change-password",
Usage: "Set the must change password flag for the provided users or all users",
Action: runMustChangePassword,
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "all",
Aliases: []string{"A"},
Usage: "All users must change password, except those explicitly excluded with --exclude",
},
&cli.StringSliceFlag{
Name: "exclude",
Aliases: []string{"e"},
Usage: "Do not change the must-change-password flag for these users",
},
&cli.BoolFlag{
Name: "unset",
Usage: "Instead of setting the must-change-password flag, unset it",
},
},
}
func runMustChangePassword(c *cli.Context) error {
ctx, cancel := installSignals()
defer cancel()
if c.NArg() == 0 && !c.IsSet("all") {
return errors.New("either usernames or --all must be provided")
}
mustChangePassword := !c.Bool("unset")
all := c.Bool("all")
exclude := c.StringSlice("exclude")
if err := initDB(ctx); err != nil {
return err
}
n, err := user_model.SetMustChangePassword(ctx, all, mustChangePassword, c.Args().Slice(), exclude)
if err != nil {
return err
}
fmt.Printf("Updated %d users setting MustChangePassword to %t\n", n, mustChangePassword)
return nil
}
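
A hedged usage sketch for must-change-password (the excluded username is a placeholder):
forgejo admin user must-change-password --all --exclude admin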

cmd/cert.go Normal file

@ -0,0 +1,196 @@
// Copyright 2009 The Go Authors. All rights reserved.
// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2016 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"crypto/ecdsa"
"crypto/elliptic"
"crypto/rand"
"crypto/rsa"
"crypto/x509"
"crypto/x509/pkix"
"encoding/pem"
"log"
"math/big"
"net"
"os"
"strings"
"time"
"github.com/urfave/cli/v2"
)
// CmdCert represents the available cert sub-command.
var CmdCert = &cli.Command{
Name: "cert",
Usage: "Generate self-signed certificate",
Description: `Generate a self-signed X.509 certificate for a TLS server.
Outputs to 'cert.pem' and 'key.pem' and will overwrite existing files.`,
Action: runCert,
Flags: []cli.Flag{
&cli.StringFlag{
Name: "host",
Value: "",
Usage: "Comma-separated hostnames and IPs to generate a certificate for",
},
&cli.StringFlag{
Name: "ecdsa-curve",
Value: "",
Usage: "ECDSA curve to use to generate a key. Valid values are P224, P256, P384, P521",
},
&cli.IntFlag{
Name: "rsa-bits",
Value: 3072,
Usage: "Size of RSA key to generate. Ignored if --ecdsa-curve is set",
},
&cli.StringFlag{
Name: "start-date",
Value: "",
Usage: "Creation date formatted as Jan 1 15:04:05 2011",
},
&cli.DurationFlag{
Name: "duration",
Value: 365 * 24 * time.Hour,
Usage: "Duration that certificate is valid for",
},
&cli.BoolFlag{
Name: "ca",
Usage: "whether this cert should be its own Certificate Authority",
},
},
}
func publicKey(priv any) any {
switch k := priv.(type) {
case *rsa.PrivateKey:
return &k.PublicKey
case *ecdsa.PrivateKey:
return &k.PublicKey
default:
return nil
}
}
func pemBlockForKey(priv any) *pem.Block {
switch k := priv.(type) {
case *rsa.PrivateKey:
return &pem.Block{Type: "RSA PRIVATE KEY", Bytes: x509.MarshalPKCS1PrivateKey(k)}
case *ecdsa.PrivateKey:
b, err := x509.MarshalECPrivateKey(k)
if err != nil {
log.Fatalf("Unable to marshal ECDSA private key: %v", err)
}
return &pem.Block{Type: "EC PRIVATE KEY", Bytes: b}
default:
return nil
}
}
func runCert(c *cli.Context) error {
if err := argsSet(c, "host"); err != nil {
return err
}
var priv any
var err error
switch c.String("ecdsa-curve") {
case "":
priv, err = rsa.GenerateKey(rand.Reader, c.Int("rsa-bits"))
case "P224":
priv, err = ecdsa.GenerateKey(elliptic.P224(), rand.Reader)
case "P256":
priv, err = ecdsa.GenerateKey(elliptic.P256(), rand.Reader)
case "P384":
priv, err = ecdsa.GenerateKey(elliptic.P384(), rand.Reader)
case "P521":
priv, err = ecdsa.GenerateKey(elliptic.P521(), rand.Reader)
default:
log.Fatalf("Unrecognized elliptic curve: %q", c.String("ecdsa-curve"))
}
if err != nil {
log.Fatalf("Failed to generate private key: %v", err)
}
var notBefore time.Time
if startDate := c.String("start-date"); startDate != "" {
notBefore, err = time.Parse("Jan 2 15:04:05 2006", startDate)
if err != nil {
log.Fatalf("Failed to parse creation date: %v", err)
}
} else {
notBefore = time.Now()
}
notAfter := notBefore.Add(c.Duration("duration"))
serialNumberLimit := new(big.Int).Lsh(big.NewInt(1), 128)
serialNumber, err := rand.Int(rand.Reader, serialNumberLimit)
if err != nil {
log.Fatalf("Failed to generate serial number: %v", err)
}
template := x509.Certificate{
SerialNumber: serialNumber,
Subject: pkix.Name{
Organization: []string{"Acme Co"},
CommonName: "Forgejo",
},
NotBefore: notBefore,
NotAfter: notAfter,
KeyUsage: x509.KeyUsageKeyEncipherment | x509.KeyUsageDigitalSignature,
ExtKeyUsage: []x509.ExtKeyUsage{x509.ExtKeyUsageServerAuth},
BasicConstraintsValid: true,
}
hosts := strings.Split(c.String("host"), ",")
for _, h := range hosts {
if ip := net.ParseIP(h); ip != nil {
template.IPAddresses = append(template.IPAddresses, ip)
} else {
template.DNSNames = append(template.DNSNames, h)
}
}
if c.Bool("ca") {
template.IsCA = true
template.KeyUsage |= x509.KeyUsageCertSign
}
derBytes, err := x509.CreateCertificate(rand.Reader, &template, &template, publicKey(priv), priv)
if err != nil {
log.Fatalf("Failed to create certificate: %v", err)
}
certOut, err := os.Create("cert.pem")
if err != nil {
log.Fatalf("Failed to open cert.pem for writing: %v", err)
}
err = pem.Encode(certOut, &pem.Block{Type: "CERTIFICATE", Bytes: derBytes})
if err != nil {
log.Fatalf("Failed to encode certificate: %v", err)
}
err = certOut.Close()
if err != nil {
log.Fatalf("Failed to write cert: %v", err)
}
log.Println("Written cert.pem")
keyOut, err := os.OpenFile("key.pem", os.O_WRONLY|os.O_CREATE|os.O_TRUNC, 0o600)
if err != nil {
log.Fatalf("Failed to open key.pem for writing: %v", err)
}
err = pem.Encode(keyOut, pemBlockForKey(priv))
if err != nil {
log.Fatalf("Failed to encode key: %v", err)
}
err = keyOut.Close()
if err != nil {
log.Fatalf("Failed to write key: %v", err)
}
log.Println("Written key.pem")
return nil
}
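
A hedged usage sketch for the cert subcommand above (hostnames are placeholders; cert.pem and key.pem are written to the current directory and overwrite existing files):
forgejo cert --host git.example.com,127.0.0.1 --duration 8760h --ca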

cmd/cmd.go Normal file

@ -0,0 +1,135 @@
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
// Package cmd provides subcommands to the gitea binary - such as "web" or
// "admin".
package cmd
import (
"context"
"errors"
"fmt"
"io"
"os"
"os/signal"
"strings"
"syscall"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"github.com/urfave/cli/v2"
)
// argsSet checks that all the required arguments are set. args is a list of
// arguments that must be set in the passed Context.
func argsSet(c *cli.Context, args ...string) error {
for _, a := range args {
if !c.IsSet(a) {
return errors.New(a + " is not set")
}
if util.IsEmptyString(c.String(a)) {
return errors.New(a + " is required")
}
}
return nil
}
// confirm waits for user input which confirms an action
func confirm() (bool, error) {
var response string
_, err := fmt.Scanln(&response)
if err != nil {
return false, err
}
switch strings.ToLower(response) {
case "y", "yes":
return true, nil
case "n", "no":
return false, nil
default:
return false, errors.New(response + " isn't a correct confirmation string")
}
}
func initDB(ctx context.Context) error {
setting.MustInstalled()
setting.LoadDBSetting()
setting.InitSQLLoggersForCli(log.INFO)
if setting.Database.Type == "" {
log.Fatal(`Database settings are missing from the configuration file: %q.
Ensure you are running in the correct environment or set the correct configuration file with -c.
If this is the intended configuration file complete the [database] section.`, setting.CustomConf)
}
if err := db.InitEngine(ctx); err != nil {
return fmt.Errorf("unable to initialize the database using the configuration in %q. Error: %w", setting.CustomConf, err)
}
return nil
}
func installSignals() (context.Context, context.CancelFunc) {
ctx, cancel := context.WithCancel(context.Background())
go func() {
// install notify
signalChannel := make(chan os.Signal, 1)
signal.Notify(
signalChannel,
syscall.SIGINT,
syscall.SIGTERM,
)
select {
case <-signalChannel:
case <-ctx.Done():
}
cancel()
signal.Reset()
}()
return ctx, cancel
}
func setupConsoleLogger(level log.Level, colorize bool, out io.Writer) {
if out != os.Stdout && out != os.Stderr {
panic("setupConsoleLogger can only be used with os.Stdout or os.Stderr")
}
writeMode := log.WriterMode{
Level: level,
Colorize: colorize,
WriterOption: log.WriterConsoleOption{Stderr: out == os.Stderr},
}
writer := log.NewEventWriterConsole("console-default", writeMode)
log.GetManager().GetLogger(log.DEFAULT).ReplaceAllWriters(writer)
}
func globalBool(c *cli.Context, name string) bool {
for _, ctx := range c.Lineage() {
if ctx.Bool(name) {
return true
}
}
return false
}
// PrepareConsoleLoggerLevel uses INFO level for the console logger by default, but some sub-commands (for the git/ssh protocol) shouldn't output any log to stdout.
// Any log that appears in the git stdout pipe will break the git protocol, e.g. the client can't push and hangs forever.
func PrepareConsoleLoggerLevel(defaultLevel log.Level) func(*cli.Context) error {
return func(c *cli.Context) error {
level := defaultLevel
if globalBool(c, "quiet") {
level = log.FATAL
}
if globalBool(c, "debug") || globalBool(c, "verbose") {
level = log.TRACE
}
log.SetConsoleLogger(log.DEFAULT, "console-default", level)
return nil
}
}

cmd/docs.go Normal file

@ -0,0 +1,65 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"fmt"
"os"
"strings"
"github.com/urfave/cli/v2"
)
// CmdDocs represents the available docs sub-command.
var CmdDocs = &cli.Command{
Name: "docs",
Usage: "Output CLI documentation",
Description: "A command to output Forgejo's CLI documentation, optionally to a file.",
Action: runDocs,
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "man",
Usage: "Output man pages instead",
},
&cli.StringFlag{
Name: "output",
Aliases: []string{"o"},
Usage: "Path to output to instead of stdout (will overwrite if exists)",
},
},
}
func runDocs(ctx *cli.Context) error {
docs, err := ctx.App.ToMarkdown()
if ctx.Bool("man") {
docs, err = ctx.App.ToMan()
}
if err != nil {
return err
}
if !ctx.Bool("man") {
// Clean up markdown. The following bug was fixed in v2, but is present in v1.
// It affects markdown output (even though the issue is referring to man pages)
// https://github.com/urfave/cli/issues/1040
firstHashtagIndex := strings.Index(docs, "#")
if firstHashtagIndex > 0 {
docs = docs[firstHashtagIndex:]
}
}
out := os.Stdout
if ctx.String("output") != "" {
fi, err := os.Create(ctx.String("output"))
if err != nil {
return err
}
defer fi.Close()
out = fi
}
_, err = fmt.Fprintln(out, docs)
return err
}
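
Usage sketches for the docs subcommand above (hedged; output paths are placeholders):
# write the CLI reference as markdown
forgejo docs --output cli.md
# render man pages instead
forgejo docs --man --output forgejo.1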

cmd/doctor.go Normal file

@ -0,0 +1,219 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT
package cmd
import (
"fmt"
golog "log"
"os"
"path/filepath"
"strings"
"text/tabwriter"
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/migrations"
migrate_base "code.gitea.io/gitea/models/migrations/base"
"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/services/doctor"
"github.com/urfave/cli/v2"
"xorm.io/xorm"
)
// CmdDoctor represents the available doctor sub-command.
var CmdDoctor = &cli.Command{
Name: "doctor",
Usage: "Diagnose and optionally fix problems, convert or re-create database tables",
Description: "A command to diagnose problems with the current Forgejo instance according to the given configuration. Some problems can optionally be fixed by modifying the database or data storage.",
Subcommands: []*cli.Command{
cmdDoctorCheck,
cmdRecreateTable,
cmdDoctorConvert,
},
}
var cmdDoctorCheck = &cli.Command{
Name: "check",
Usage: "Diagnose and optionally fix problems",
Description: "A command to diagnose problems with the current Forgejo instance according to the given configuration. Some problems can optionally be fixed by modifying the database or data storage.",
Action: runDoctorCheck,
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "list",
Usage: "List the available checks",
},
&cli.BoolFlag{
Name: "default",
Usage: "Run the default checks (if neither --run or --all is set, this is the default behaviour)",
},
&cli.StringSliceFlag{
Name: "run",
Usage: "Run the provided checks - (if --default is set, the default checks will also run)",
},
&cli.BoolFlag{
Name: "all",
Usage: "Run all the available checks",
},
&cli.BoolFlag{
Name: "fix",
Usage: "Automatically fix what we can",
},
&cli.StringFlag{
Name: "log-file",
Usage: `Name of the log file (no verbose log output by default). Set to "-" to output to stdout`,
},
&cli.BoolFlag{
Name: "color",
Aliases: []string{"H"},
Usage: "Use color for outputted information",
},
},
}
var cmdRecreateTable = &cli.Command{
Name: "recreate-table",
Usage: "Recreate tables from XORM definitions and copy the data.",
ArgsUsage: "[TABLE]... : (TABLEs to recreate - leave blank for all)",
Flags: []cli.Flag{
&cli.BoolFlag{
Name: "debug",
Usage: "Print SQL commands sent",
},
},
Description: `The database definitions Forgejo uses change across versions, sometimes changing default values and leaving old unused columns.
This command will cause Xorm to recreate tables, copying over the data and deleting the old table.
You should back-up your database before doing this and ensure that your database is up-to-date first.`,
Action: runRecreateTable,
}
func runRecreateTable(ctx *cli.Context) error {
stdCtx, cancel := installSignals()
defer cancel()
// Redirect the default golog to here
golog.SetFlags(0)
golog.SetPrefix("")
golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
debug := ctx.Bool("debug")
setting.MustInstalled()
setting.LoadDBSetting()
if debug {
setting.InitSQLLoggersForCli(log.DEBUG)
} else {
setting.InitSQLLoggersForCli(log.INFO)
}
setting.Database.LogSQL = debug
if err := db.InitEngine(stdCtx); err != nil {
fmt.Println(err)
fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
return nil
}
args := ctx.Args()
names := make([]string, 0, ctx.NArg())
for i := 0; i < ctx.NArg(); i++ {
names = append(names, args.Get(i))
}
beans, err := db.NamesToBean(names...)
if err != nil {
return err
}
recreateTables := migrate_base.RecreateTables(beans...)
return db.InitEngineWithMigration(stdCtx, func(x *xorm.Engine) error {
if err := migrations.EnsureUpToDate(x); err != nil {
return err
}
return recreateTables(x)
})
}
func setupDoctorDefaultLogger(ctx *cli.Context, colorize bool) {
// Silence the default loggers
setupConsoleLogger(log.FATAL, log.CanColorStderr, os.Stderr)
logFile := ctx.String("log-file")
if logFile == "" {
return // if no doctor log-file is set, do not show any log from default logger
} else if logFile == "-" {
setupConsoleLogger(log.TRACE, colorize, os.Stdout)
} else {
logFile, _ = filepath.Abs(logFile)
writeMode := log.WriterMode{Level: log.TRACE, WriterOption: log.WriterFileOption{FileName: logFile}}
writer, err := log.NewEventWriter("console-to-file", "file", writeMode)
if err != nil {
log.FallbackErrorf("unable to create file log writer: %v", err)
return
}
log.GetManager().GetLogger(log.DEFAULT).ReplaceAllWriters(writer)
}
}
func runDoctorCheck(ctx *cli.Context) error {
stdCtx, cancel := installSignals()
defer cancel()
colorize := log.CanColorStdout
if ctx.IsSet("color") {
colorize = ctx.Bool("color")
}
setupDoctorDefaultLogger(ctx, colorize)
// Finally redirect the default golang's log to here
golog.SetFlags(0)
golog.SetPrefix("")
golog.SetOutput(log.LoggerToWriter(log.GetLogger(log.DEFAULT).Info))
if ctx.IsSet("list") {
w := tabwriter.NewWriter(os.Stdout, 0, 8, 1, '\t', 0)
_, _ = w.Write([]byte("Default\tName\tTitle\n"))
doctor.SortChecks(doctor.Checks)
for _, check := range doctor.Checks {
if check.IsDefault {
_, _ = w.Write([]byte{'*'})
}
_, _ = w.Write([]byte{'\t'})
_, _ = w.Write([]byte(check.Name))
_, _ = w.Write([]byte{'\t'})
_, _ = w.Write([]byte(check.Title))
_, _ = w.Write([]byte{'\n'})
}
return w.Flush()
}
var checks []*doctor.Check
if ctx.Bool("all") {
checks = make([]*doctor.Check, len(doctor.Checks))
copy(checks, doctor.Checks)
} else if ctx.IsSet("run") {
addDefault := ctx.Bool("default")
runNamesSet := container.SetOf(ctx.StringSlice("run")...)
for _, check := range doctor.Checks {
if (addDefault && check.IsDefault) || runNamesSet.Contains(check.Name) {
checks = append(checks, check)
runNamesSet.Remove(check.Name)
}
}
if len(runNamesSet) > 0 {
return fmt.Errorf("unknown checks: %q", strings.Join(runNamesSet.Values(), ","))
}
} else {
for _, check := range doctor.Checks {
if check.IsDefault {
checks = append(checks, check)
}
}
}
return doctor.RunChecks(stdCtx, colorize, ctx.Bool("fix"), checks)
}
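
Usage sketches for doctor check (hedged):
# show the available checks; default checks are marked with *
forgejo doctor check --list
# run every check, let it fix what it can, and log verbosely to stdout
forgejo doctor check --all --fix --log-file=-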

Some files were not shown because too many files have changed in this diff