Merge branch 'main' into lunny/refactor_getpatch
commit 217bd847a7
@@ -22,20 +22,25 @@ groups:
     name: FEATURES
     labels:
       - type/feature
-  -
-    name: API
-    labels:
-      - modifies/api
   -
     name: ENHANCEMENTS
     labels:
       - type/enhancement
-      - type/refactoring
-      - topic/ui
+  -
+    name: PERFORMANCE
+    labels:
+      - performance/memory
+      - performance/speed
+      - performance/bigrepo
+      - performance/cpu
   -
     name: BUGFIXES
     labels:
       - type/bug
+  -
+    name: API
+    labels:
+      - modifies/api
   -
     name: TESTING
     labels:
.eslintrc.cjs (117 changed lines)

@@ -1,3 +1,4 @@
+const vitestPlugin = require('@vitest/eslint-plugin');
 const restrictedSyntax = ['WithStatement', 'ForInStatement', 'LabeledStatement', 'SequenceExpression'];
 
 module.exports = {
@@ -37,8 +38,6 @@ module.exports = {
     'eslint-plugin-regexp',
     'eslint-plugin-sonarjs',
     'eslint-plugin-unicorn',
-    'eslint-plugin-vitest',
-    'eslint-plugin-vitest-globals',
     'eslint-plugin-wc',
   ],
   env: {
@@ -46,6 +45,13 @@ module.exports = {
     node: true,
   },
   overrides: [
+    {
+      files: ['**/*.cjs'],
+      rules: {
+        'import-x/no-commonjs': [0],
+        '@typescript-eslint/no-require-imports': [0],
+      },
+    },
     {
       files: ['web_src/**/*'],
       globals: {
@@ -82,59 +88,58 @@ module.exports = {
     },
     {
      files: ['**/*.test.*', 'web_src/js/test/setup.ts'],
-      env: {
-        'vitest-globals/env': true,
-      },
+      plugins: ['@vitest/eslint-plugin'],
+      globals: vitestPlugin.environments.env.globals,
       rules: {
-        'vitest/consistent-test-filename': [0],
+        '@vitest/consistent-test-filename': [0],
-        'vitest/consistent-test-it': [0],
+        '@vitest/consistent-test-it': [0],
-        'vitest/expect-expect': [0],
+        '@vitest/expect-expect': [0],
-        'vitest/max-expects': [0],
+        '@vitest/max-expects': [0],
-        'vitest/max-nested-describe': [0],
+        '@vitest/max-nested-describe': [0],
-        'vitest/no-alias-methods': [0],
+        '@vitest/no-alias-methods': [0],
-        'vitest/no-commented-out-tests': [0],
+        '@vitest/no-commented-out-tests': [0],
-        'vitest/no-conditional-expect': [0],
+        '@vitest/no-conditional-expect': [0],
-        'vitest/no-conditional-in-test': [0],
+        '@vitest/no-conditional-in-test': [0],
-        'vitest/no-conditional-tests': [0],
+        '@vitest/no-conditional-tests': [0],
-        'vitest/no-disabled-tests': [0],
+        '@vitest/no-disabled-tests': [0],
-        'vitest/no-done-callback': [0],
+        '@vitest/no-done-callback': [0],
-        'vitest/no-duplicate-hooks': [0],
+        '@vitest/no-duplicate-hooks': [0],
-        'vitest/no-focused-tests': [0],
+        '@vitest/no-focused-tests': [0],
-        'vitest/no-hooks': [0],
+        '@vitest/no-hooks': [0],
-        'vitest/no-identical-title': [2],
+        '@vitest/no-identical-title': [2],
-        'vitest/no-interpolation-in-snapshots': [0],
+        '@vitest/no-interpolation-in-snapshots': [0],
-        'vitest/no-large-snapshots': [0],
+        '@vitest/no-large-snapshots': [0],
-        'vitest/no-mocks-import': [0],
+        '@vitest/no-mocks-import': [0],
-        'vitest/no-restricted-matchers': [0],
+        '@vitest/no-restricted-matchers': [0],
-        'vitest/no-restricted-vi-methods': [0],
+        '@vitest/no-restricted-vi-methods': [0],
-        'vitest/no-standalone-expect': [0],
+        '@vitest/no-standalone-expect': [0],
-        'vitest/no-test-prefixes': [0],
+        '@vitest/no-test-prefixes': [0],
-        'vitest/no-test-return-statement': [0],
+        '@vitest/no-test-return-statement': [0],
-        'vitest/prefer-called-with': [0],
+        '@vitest/prefer-called-with': [0],
-        'vitest/prefer-comparison-matcher': [0],
+        '@vitest/prefer-comparison-matcher': [0],
-        'vitest/prefer-each': [0],
+        '@vitest/prefer-each': [0],
-        'vitest/prefer-equality-matcher': [0],
+        '@vitest/prefer-equality-matcher': [0],
-        'vitest/prefer-expect-resolves': [0],
+        '@vitest/prefer-expect-resolves': [0],
-        'vitest/prefer-hooks-in-order': [0],
+        '@vitest/prefer-hooks-in-order': [0],
-        'vitest/prefer-hooks-on-top': [2],
+        '@vitest/prefer-hooks-on-top': [2],
-        'vitest/prefer-lowercase-title': [0],
+        '@vitest/prefer-lowercase-title': [0],
-        'vitest/prefer-mock-promise-shorthand': [0],
+        '@vitest/prefer-mock-promise-shorthand': [0],
-        'vitest/prefer-snapshot-hint': [0],
+        '@vitest/prefer-snapshot-hint': [0],
-        'vitest/prefer-spy-on': [0],
+        '@vitest/prefer-spy-on': [0],
-        'vitest/prefer-strict-equal': [0],
+        '@vitest/prefer-strict-equal': [0],
-        'vitest/prefer-to-be': [0],
+        '@vitest/prefer-to-be': [0],
-        'vitest/prefer-to-be-falsy': [0],
+        '@vitest/prefer-to-be-falsy': [0],
-        'vitest/prefer-to-be-object': [0],
+        '@vitest/prefer-to-be-object': [0],
-        'vitest/prefer-to-be-truthy': [0],
+        '@vitest/prefer-to-be-truthy': [0],
-        'vitest/prefer-to-contain': [0],
+        '@vitest/prefer-to-contain': [0],
-        'vitest/prefer-to-have-length': [0],
+        '@vitest/prefer-to-have-length': [0],
-        'vitest/prefer-todo': [0],
+        '@vitest/prefer-todo': [0],
-        'vitest/require-hook': [0],
+        '@vitest/require-hook': [0],
-        'vitest/require-to-throw-message': [0],
+        '@vitest/require-to-throw-message': [0],
-        'vitest/require-top-level-describe': [0],
+        '@vitest/require-top-level-describe': [0],
-        'vitest/valid-describe-callback': [2],
+        '@vitest/valid-describe-callback': [2],
-        'vitest/valid-expect': [2],
+        '@vitest/valid-expect': [2],
-        'vitest/valid-title': [2],
+        '@vitest/valid-title': [2],
       },
     },
     {
@@ -163,7 +168,7 @@ module.exports = {
     {
       files: ['tests/e2e/**'],
       plugins: [
-        'eslint-plugin-playwright'
+        'eslint-plugin-playwright',
       ],
       extends: [
         'plugin:playwright/recommended',
@@ -403,7 +408,7 @@ module.exports = {
     'github/a11y-svg-has-accessible-name': [0],
     'github/array-foreach': [0],
     'github/async-currenttarget': [2],
-    'github/async-preventdefault': [2],
+    'github/async-preventdefault': [0], // https://github.com/github/eslint-plugin-github/issues/599
     'github/authenticity-token': [0],
     'github/get-attribute': [0],
     'github/js-class-name': [0],
.github/ISSUE_TEMPLATE/config.yml (vendored, 2 changed lines)

@@ -13,5 +13,5 @@ contact_links:
     url: https://docs.gitea.com/help/faq
     about: Please check if your question isn't mentioned here.
   - name: Crowdin Translations
-    url: https://crowdin.com/project/gitea
+    url: https://translate.gitea.com
     about: Translations are managed here.
.github/workflows/cron-licenses.yml (vendored, 4 changed lines)

@@ -1,8 +1,8 @@
 name: cron-licenses
 
 on:
-  schedule:
-    - cron: "7 0 * * 1" # every Monday at 00:07 UTC
+  # schedule:
+  #   - cron: "7 0 * * 1" # every Monday at 00:07 UTC
   workflow_dispatch:
 
 jobs:
.mailmap (new file, 2 lines)

@@ -0,0 +1,2 @@
+Unknwon <u@gogs.io> <joe2010xtmf@163.com>
+Unknwon <u@gogs.io> 无闻 <u@gogs.io>
@@ -182,7 +182,7 @@ Here's how to run the test suite:
 
 ## Translation
 
-All translation work happens on [Crowdin](https://crowdin.com/project/gitea).
+All translation work happens on [Crowdin](https://translate.gitea.com).
 The only translation that is maintained in this repository is [the English translation](https://github.com/go-gitea/gitea/blob/main/options/locale/locale_en-US.ini).
 It is synced regularly with Crowdin. \
 Other locales on main branch **should not** be updated manually as they will be overwritten with each sync. \
@@ -1,5 +1,5 @@
 # Build stage
-FROM docker.io/library/golang:1.23-alpine3.21 AS build-env
+FROM docker.io/library/golang:1.24-alpine3.21 AS build-env
 
 ARG GOPROXY
 ENV GOPROXY=${GOPROXY:-direct}
@@ -1,5 +1,5 @@
 # Build stage
-FROM docker.io/library/golang:1.23-alpine3.21 AS build-env
+FROM docker.io/library/golang:1.24-alpine3.21 AS build-env
 
 ARG GOPROXY
 ENV GOPROXY=${GOPROXY:-direct}
@@ -63,3 +63,4 @@ Kemal Zebari <kemalzebra@gmail.com> (@kemzeb)
 Rowan Bohde <rowan.bohde@gmail.com> (@bohde)
 hiifong <i@hiif.ong> (@hiifong)
 metiftikci <metiftikci@hotmail.com> (@metiftikci)
+Christopher Homberger <christopher.homberger@web.de> (@ChristopherHX)
Makefile (188 changed lines)

@@ -23,12 +23,12 @@ SHASUM ?= shasum -a 256
 HAS_GO := $(shell hash $(GO) > /dev/null 2>&1 && echo yes)
 COMMA := ,
 
-XGO_VERSION := go-1.23.x
+XGO_VERSION := go-1.24.x
 
 AIR_PACKAGE ?= github.com/air-verse/air@v1
-EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3.0.3
+EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3.1.2
 GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.7.0
-GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.2
+GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.63.4
 GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.12
 MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.6.0
 SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0
@@ -36,7 +36,7 @@ XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
 GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1
 GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1
 ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1
-GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.17.0
+GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.17.1
 
 DOCKER_IMAGE ?= gitea/gitea
 DOCKER_TAG ?= latest
@@ -144,7 +144,7 @@ TAR_EXCLUDES := .git data indexers queues log node_modules $(EXECUTABLE) $(FOMAN
 GO_DIRS := build cmd models modules routers services tests
 WEB_DIRS := web_src/js web_src/css
 
-ESLINT_FILES := web_src/js tools *.js *.ts tests/e2e
+ESLINT_FILES := web_src/js tools *.js *.ts *.cjs tests/e2e
 STYLELINT_FILES := web_src/css web_src/js/components/*.vue
 SPELLCHECK_FILES := $(GO_DIRS) $(WEB_DIRS) templates options/locale/locale_en-US.ini .github $(filter-out CHANGELOG.md, $(wildcard *.go *.js *.md *.yml *.yaml *.toml))
 EDITORCONFIG_FILES := templates .github/workflows options/locale/locale_en-US.ini
@@ -189,67 +189,11 @@ TEST_MSSQL_PASSWORD ?= MwantsaSecurePassword1
 all: build
 
 .PHONY: help
-help:
-    @echo "Make Routines:"
-    @echo " - \"\" equivalent to \"build\""
-    @echo " - build build everything"
-    @echo " - frontend build frontend files"
-    @echo " - backend build backend files"
-    @echo " - watch watch everything and continuously rebuild"
-    @echo " - watch-frontend watch frontend files and continuously rebuild"
-    @echo " - watch-backend watch backend files and continuously rebuild"
-    @echo " - clean delete backend and integration files"
-    @echo " - clean-all delete backend, frontend and integration files"
-    @echo " - deps install dependencies"
-    @echo " - deps-frontend install frontend dependencies"
-    @echo " - deps-backend install backend dependencies"
-    @echo " - deps-tools install tool dependencies"
-    @echo " - deps-py install python dependencies"
-    @echo " - lint lint everything"
-    @echo " - lint-fix lint everything and fix issues"
-    @echo " - lint-actions lint action workflow files"
-    @echo " - lint-frontend lint frontend files"
-    @echo " - lint-frontend-fix lint frontend files and fix issues"
-    @echo " - lint-backend lint backend files"
-    @echo " - lint-backend-fix lint backend files and fix issues"
-    @echo " - lint-go lint go files"
-    @echo " - lint-go-fix lint go files and fix issues"
-    @echo " - lint-go-vet lint go files with vet"
-    @echo " - lint-go-gopls lint go files with gopls"
-    @echo " - lint-js lint js files"
-    @echo " - lint-js-fix lint js files and fix issues"
-    @echo " - lint-css lint css files"
-    @echo " - lint-css-fix lint css files and fix issues"
-    @echo " - lint-md lint markdown files"
-    @echo " - lint-swagger lint swagger files"
-    @echo " - lint-templates lint template files"
-    @echo " - lint-yaml lint yaml files"
-    @echo " - lint-spell lint spelling"
-    @echo " - lint-spell-fix lint spelling and fix issues"
-    @echo " - checks run various consistency checks"
-    @echo " - checks-frontend check frontend files"
-    @echo " - checks-backend check backend files"
-    @echo " - test test everything"
-    @echo " - test-frontend test frontend files"
-    @echo " - test-backend test backend files"
-    @echo " - test-e2e[\#TestSpecificName] test end to end using playwright"
-    @echo " - update update js and py dependencies"
-    @echo " - update-js update js dependencies"
-    @echo " - update-py update py dependencies"
-    @echo " - webpack build webpack files"
-    @echo " - svg build svg files"
-    @echo " - fomantic build fomantic files"
-    @echo " - generate run \"go generate\""
-    @echo " - fmt format the Go code"
-    @echo " - generate-license update license files"
-    @echo " - generate-gitignore update gitignore files"
-    @echo " - generate-manpage generate manpage"
-    @echo " - generate-swagger generate the swagger spec from code comments"
-    @echo " - swagger-validate check if the swagger spec is valid"
-    @echo " - go-licenses regenerate go licenses"
-    @echo " - tidy run go mod tidy"
-    @echo " - test[\#TestSpecificName] run unit test"
-    @echo " - test-sqlite[\#TestSpecificName] run integration test for sqlite"
+help: Makefile ## print Makefile help information.
+    @awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m[TARGETS] default target: build\033[0m\n\n\033[35mTargets:\033[0m\n"} /^[0-9A-Za-z._-]+:.*?##/ { printf " \033[36m%-45s\033[0m %s\n", $$1, $$2 }' Makefile #$(MAKEFILE_LIST)
+    @printf " \033[36m%-46s\033[0m %s\n" "test-e2e[#TestSpecificName]" "test end to end using playwright"
+    @printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test"
+    @printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite"
 
 .PHONY: go-check
 go-check:
@@ -280,11 +224,11 @@ node-check:
     fi
 
 .PHONY: clean-all
-clean-all: clean
+clean-all: clean ## delete backend, frontend and integration files
     rm -rf $(WEBPACK_DEST_ENTRIES) node_modules
 
 .PHONY: clean
-clean:
+clean: ## delete backend and integration files
     rm -rf $(EXECUTABLE) $(DIST) $(BINDATA_DEST) $(BINDATA_HASH) \
         integrations*.test \
         e2e*.test \
@@ -296,7 +240,7 @@ clean:
         tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/
 
 .PHONY: fmt
-fmt:
+fmt: ## format the Go code
     @GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}'
     $(eval TEMPLATES := $(shell find templates -type f -name '*.tmpl'))
     @# strip whitespace after '{{' or '(' and before '}}' or ')' unless there is only
@@ -325,7 +269,7 @@ TAGS_PREREQ := $(TAGS_EVIDENCE)
 endif
 
 .PHONY: generate-swagger
-generate-swagger: $(SWAGGER_SPEC)
+generate-swagger: $(SWAGGER_SPEC) ## generate the swagger spec from code comments
 
 $(SWAGGER_SPEC): $(GO_SOURCES_NO_BINDATA)
     $(GO) run $(SWAGGER_PACKAGE) generate spec -x "$(SWAGGER_EXCLUDE)" -o './$(SWAGGER_SPEC)'
@@ -342,78 +286,78 @@ swagger-check: generate-swagger
     fi
 
 .PHONY: swagger-validate
-swagger-validate:
+swagger-validate: ## check if the swagger spec is valid
     $(SED_INPLACE) '$(SWAGGER_SPEC_S_JSON)' './$(SWAGGER_SPEC)'
     $(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)'
     $(SED_INPLACE) '$(SWAGGER_SPEC_S_TMPL)' './$(SWAGGER_SPEC)'
 
 .PHONY: checks
-checks: checks-frontend checks-backend
+checks: checks-frontend checks-backend ## run various consistency checks
 
 .PHONY: checks-frontend
-checks-frontend: lockfile-check svg-check
+checks-frontend: lockfile-check svg-check ## check frontend files
 
 .PHONY: checks-backend
-checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check
+checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check ## check backend files
 
 .PHONY: lint
-lint: lint-frontend lint-backend lint-spell
+lint: lint-frontend lint-backend lint-spell ## lint everything
 
 .PHONY: lint-fix
-lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix
+lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix ## lint everything and fix issues
 
 .PHONY: lint-frontend
-lint-frontend: lint-js lint-css
+lint-frontend: lint-js lint-css ## lint frontend files
 
 .PHONY: lint-frontend-fix
-lint-frontend-fix: lint-js-fix lint-css-fix
+lint-frontend-fix: lint-js-fix lint-css-fix ## lint frontend files and fix issues
 
 .PHONY: lint-backend
-lint-backend: lint-go lint-go-vet lint-go-gopls lint-editorconfig
+lint-backend: lint-go lint-go-vet lint-go-gopls lint-editorconfig ## lint backend files
 
 .PHONY: lint-backend-fix
-lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
+lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig ## lint backend files and fix issues
 
 .PHONY: lint-js
-lint-js: node_modules
+lint-js: node_modules ## lint js files
     npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES)
     npx vue-tsc
 
 .PHONY: lint-js-fix
-lint-js-fix: node_modules
+lint-js-fix: node_modules ## lint js files and fix issues
    npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES) --fix
    npx vue-tsc
 
 .PHONY: lint-css
-lint-css: node_modules
+lint-css: node_modules ## lint css files
    npx stylelint --color --max-warnings=0 $(STYLELINT_FILES)
 
 .PHONY: lint-css-fix
-lint-css-fix: node_modules
+lint-css-fix: node_modules ## lint css files and fix issues
    npx stylelint --color --max-warnings=0 $(STYLELINT_FILES) --fix
 
 .PHONY: lint-swagger
-lint-swagger: node_modules
+lint-swagger: node_modules ## lint swagger files
    npx spectral lint -q -F hint $(SWAGGER_SPEC)
 
 .PHONY: lint-md
-lint-md: node_modules
+lint-md: node_modules ## lint markdown files
    npx markdownlint *.md
 
 .PHONY: lint-spell
-lint-spell:
+lint-spell: ## lint spelling
    @go run $(MISSPELL_PACKAGE) -dict tools/misspellings.csv -error $(SPELLCHECK_FILES)
 
 .PHONY: lint-spell-fix
-lint-spell-fix:
+lint-spell-fix: ## lint spelling and fix issues
    @go run $(MISSPELL_PACKAGE) -dict tools/misspellings.csv -w $(SPELLCHECK_FILES)
 
 .PHONY: lint-go
-lint-go:
+lint-go: ## lint go files
    $(GO) run $(GOLANGCI_LINT_PACKAGE) run
 
 .PHONY: lint-go-fix
-lint-go-fix:
+lint-go-fix: ## lint go files and fix issues
    $(GO) run $(GOLANGCI_LINT_PACKAGE) run --fix
 
 # workaround step for the lint-go-windows CI task because 'go run' can not
@@ -424,13 +368,13 @@ lint-go-windows:
    golangci-lint run
 
 .PHONY: lint-go-vet
-lint-go-vet:
+lint-go-vet: ## lint go files with vet
    @echo "Running go vet..."
    @GOOS= GOARCH= $(GO) build code.gitea.io/gitea-vet
    @$(GO) vet -vettool=gitea-vet ./...
 
 .PHONY: lint-go-gopls
-lint-go-gopls:
+lint-go-gopls: ## lint go files with gopls
    @echo "Running gopls check..."
    @GO=$(GO) GOPLS_PACKAGE=$(GOPLS_PACKAGE) tools/lint-go-gopls.sh $(GO_SOURCES_NO_BINDATA)
 
@@ -439,41 +383,41 @@ lint-editorconfig:
    @$(GO) run $(EDITORCONFIG_CHECKER_PACKAGE) $(EDITORCONFIG_FILES)
 
 .PHONY: lint-actions
-lint-actions:
+lint-actions: ## lint action workflow files
    $(GO) run $(ACTIONLINT_PACKAGE)
 
 .PHONY: lint-templates
-lint-templates: .venv node_modules
+lint-templates: .venv node_modules ## lint template files
    @node tools/lint-templates-svg.js
    @poetry run djlint $(shell find templates -type f -iname '*.tmpl')
 
 .PHONY: lint-yaml
-lint-yaml: .venv
+lint-yaml: .venv ## lint yaml files
-    @poetry run yamllint .
+    @poetry run yamllint -s .
 
 .PHONY: watch
-watch:
+watch: ## watch everything and continuously rebuild
    @bash tools/watch.sh
 
 .PHONY: watch-frontend
-watch-frontend: node-check node_modules
+watch-frontend: node-check node_modules ## watch frontend files and continuously rebuild
    @rm -rf $(WEBPACK_DEST_ENTRIES)
    NODE_ENV=development npx webpack --watch --progress
 
 .PHONY: watch-backend
-watch-backend: go-check
+watch-backend: go-check ## watch backend files and continuously rebuild
    GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml
 
 .PHONY: test
-test: test-frontend test-backend
+test: test-frontend test-backend ## test everything
 
 .PHONY: test-backend
-test-backend:
+test-backend: ## test frontend files
    @echo "Running go test with $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..."
    @$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(GO_TEST_PACKAGES)
 
 .PHONY: test-frontend
-test-frontend: node_modules
+test-frontend: node_modules ## test backend files
    npx vitest
 
 .PHONY: test-check
@@ -505,7 +449,7 @@ unit-test-coverage:
    @$(GO) test $(GOTESTFLAGS) -timeout=20m -tags='$(TEST_TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1
 
 .PHONY: tidy
-tidy:
+tidy: ## run go mod tidy
    $(eval MIN_GO_VERSION := $(shell grep -Eo '^go\s+[0-9]+\.[0-9.]+' go.mod | cut -d' ' -f2))
    $(GO) mod tidy -compat=$(MIN_GO_VERSION)
    @$(MAKE) --no-print-directory $(GO_LICENSE_FILE)
@@ -524,7 +468,7 @@ tidy-check: tidy
    fi
 
 .PHONY: go-licenses
-go-licenses: $(GO_LICENSE_FILE)
+go-licenses: $(GO_LICENSE_FILE) ## regenerate go licenses
 
 $(GO_LICENSE_FILE): go.mod go.sum
    -$(GO) run $(GO_LICENSES_PACKAGE) save . --force --save_path=$(GO_LICENSE_TMP_DIR) 2>/dev/null
@@ -771,17 +715,17 @@ install: $(wildcard *.go)
    CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'
 
 .PHONY: build
-build: frontend backend
+build: frontend backend ## build everything
 
 .PHONY: frontend
-frontend: $(WEBPACK_DEST)
+frontend: $(WEBPACK_DEST) ## build frontend files
 
 .PHONY: backend
-backend: go-check generate-backend $(EXECUTABLE)
+backend: go-check generate-backend $(EXECUTABLE) ## build backend files
 
 # We generate the backend before the frontend in case we in future we want to generate things in the frontend from generated files in backend
 .PHONY: generate
-generate: generate-backend
+generate: generate-backend ## run "go generate"
 
 .PHONY: generate-backend
 generate-backend: $(TAGS_PREREQ) generate-go
@@ -846,20 +790,20 @@ release-sources: | $(DIST_DIRS)
    rm -f $(STORED_VERSION_FILE)
 
 .PHONY: deps
-deps: deps-frontend deps-backend deps-tools deps-py
+deps: deps-frontend deps-backend deps-tools deps-py ## install dependencies
 
 .PHONY: deps-py
-deps-py: .venv
+deps-py: .venv ## install python dependencies
 
 .PHONY: deps-frontend
-deps-frontend: node_modules
+deps-frontend: node_modules ## install frontend dependencies
 
 .PHONY: deps-backend
-deps-backend:
+deps-backend: ## install backend dependencies
    $(GO) mod download
 
 .PHONY: deps-tools
-deps-tools:
+deps-tools: ## install tool dependencies
    $(GO) install $(AIR_PACKAGE) & \
    $(GO) install $(EDITORCONFIG_CHECKER_PACKAGE) & \
    $(GO) install $(GOFUMPT_PACKAGE) & \
@@ -883,10 +827,10 @@ node_modules: package-lock.json
    @touch .venv
 
 .PHONY: update
-update: update-js update-py
+update: update-js update-py ## update js and py dependencies
 
 .PHONY: update-js
-update-js: node-check | node_modules
+update-js: node-check | node_modules ## update js dependencies
    npx updates -u -f package.json
    rm -rf node_modules package-lock.json
    npm install --package-lock
@@ -895,14 +839,14 @@ update-js: node-check | node_modules
    @touch node_modules
 
 .PHONY: update-py
-update-py: node-check | node_modules
+update-py: node-check | node_modules ## update py dependencies
    npx updates -u -f pyproject.toml
    rm -rf .venv poetry.lock
    poetry install
    @touch .venv
 
 .PHONY: fomantic
-fomantic:
+fomantic: ## build fomantic files
    rm -rf $(FOMANTIC_WORK_DIR)/build
    cd $(FOMANTIC_WORK_DIR) && npm install --no-save
    cp -f $(FOMANTIC_WORK_DIR)/theme.config.less $(FOMANTIC_WORK_DIR)/node_modules/fomantic-ui/src/theme.config
@@ -915,7 +859,7 @@ fomantic:
    rm -f $(FOMANTIC_WORK_DIR)/build/*.min.*
 
 .PHONY: webpack
-webpack: $(WEBPACK_DEST)
+webpack: $(WEBPACK_DEST) ## build webpack files
 
 $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json
    @$(MAKE) -s node-check node_modules
@@ -925,7 +869,7 @@ $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json
    @touch $(WEBPACK_DEST)
 
 .PHONY: svg
-svg: node-check | node_modules
+svg: node-check | node_modules ## build svg files
    rm -rf $(SVG_DEST_DIR)
    node tools/generate-svg.js
 
@@ -961,11 +905,11 @@ update-translations:
    rmdir ./translations
 
 .PHONY: generate-license
-generate-license:
+generate-license: ## update license files
    $(GO) run build/generate-licenses.go
 
 .PHONY: generate-gitignore
-generate-gitignore:
+generate-gitignore: ## update gitignore files
    $(GO) run build/generate-gitignores.go
 
 .PHONY: generate-images
@@ -974,7 +918,7 @@ generate-images: | node_modules
    node tools/generate-images.js $(TAGS)
 
 .PHONY: generate-manpage
-generate-manpage:
+generate-manpage: ## generate manpage
    @[ -f gitea ] || make backend
    @mkdir -p man/man1/ man/man5
    @./gitea docs --man > man/man1/gitea.1
README.md (42 changed lines)

@@ -9,7 +9,7 @@
 [](https://opencollective.com/gitea "Become a backer/sponsor of gitea")
 [](https://opensource.org/licenses/MIT "License: MIT")
 [](https://gitpod.io/#https://github.com/go-gitea/gitea)
-[](https://crowdin.com/project/gitea "Crowdin")
+[](https://translate.gitea.com "Crowdin")
 
 [View this document in Chinese](./README_ZH.md)
 
@@ -31,6 +31,14 @@ For accessing free Gitea service (with a limited number of repositories), you ca
 
 To quickly deploy your own dedicated Gitea instance on Gitea Cloud, you can start a free trial at [cloud.gitea.com](https://cloud.gitea.com).
 
+## Documentation
+
+You can find comprehensive documentation on our official [documentation website](https://docs.gitea.com/).
+
+It includes installation, administration, usage, development, contributing guides, and more to help you get started and explore all features effectively.
+
+If you have any suggestions or would like to contribute to it, you can visit the [documentation repository](https://gitea.com/gitea/docs)
+
 ## Building
 
 From the root of the source tree, run:
@@ -52,6 +60,8 @@ More info: https://docs.gitea.com/installation/install-from-source
 
 ## Using
 
+After building, a binary file named `gitea` will be generated in the root of the source tree by default. To run it, use:
+
 ./gitea web
 
 > [!NOTE]
@@ -68,22 +78,25 @@ Expected workflow is: Fork -> Patch -> Push -> Pull Request
 
 ## Translating
 
-Translations are done through Crowdin. If you want to translate to a new language ask one of the managers in the Crowdin project to add a new language there.
+[](https://translate.gitea.com)
+
+Translations are done through [Crowdin](https://translate.gitea.com). If you want to translate to a new language ask one of the managers in the Crowdin project to add a new language there.
 
 You can also just create an issue for adding a language or ask on discord on the #translation channel. If you need context or find some translation issues, you can leave a comment on the string or ask on Discord. For general translation questions there is a section in the docs. Currently a bit empty but we hope to fill it as questions pop up.
 
-https://docs.gitea.com/contributing/localization
-
-[](https://crowdin.com/project/gitea)
+Get more information from [documentation](https://docs.gitea.com/contributing/localization).
 
-## Further information
+## Official and Third-Party Projects
 
-For more information and instructions about how to install Gitea, please look at our [documentation](https://docs.gitea.com/).
-If you have questions that are not covered by the documentation, you can get in contact with us on our [Discord server](https://discord.gg/Gitea) or create a post in the [discourse forum](https://forum.gitea.com/).
+We provide an official [go-sdk](https://gitea.com/gitea/go-sdk), a CLI tool called [tea](https://gitea.com/gitea/tea) and an [action runner](https://gitea.com/gitea/act_runner) for Gitea Action.
 
-We maintain a list of Gitea-related projects at [gitea/awesome-gitea](https://gitea.com/gitea/awesome-gitea).
+We maintain a list of Gitea-related projects at [gitea/awesome-gitea](https://gitea.com/gitea/awesome-gitea), where you can discover more third-party projects, including SDKs, plugins, themes, and more.
 
-The official Gitea CLI is developed at [gitea/tea](https://gitea.com/gitea/tea).
+## Communication
+
+[](https://discord.gg/Gitea "Join the Discord chat at https://discord.gg/Gitea")
+
+If you have questions that are not covered by the [documentation](https://docs.gitea.com/), you can get in contact with us on our [Discord server](https://discord.gg/Gitea) or create a post in the [discourse forum](https://forum.gitea.com/).
 
 ## Authors
 
@@ -122,18 +135,25 @@ Gitea is pronounced [/ɡɪ’ti:/](https://youtu.be/EM71-2uDAoY) as in "gi-tea"
 
 We're [working on it](https://github.com/go-gitea/gitea/issues/1029).
 
+**Where can I find the security patches?**
+
+In the [release log](https://github.com/go-gitea/gitea/releases) or the [change log](https://github.com/go-gitea/gitea/blob/main/CHANGELOG.md), search for the keyword `SECURITY` to find the security patches.
+
 ## License
 
 This project is licensed under the MIT License.
 See the [LICENSE](https://github.com/go-gitea/gitea/blob/main/LICENSE) file
 for the full license text.
 
-## Screenshots
+## Further information
 
-Looking for an overview of the interface? Check it out!
+<details>
+<summary>Looking for an overview of the interface? Check it out!</summary>
 
 ||||
 |:---:|:---:|:---:|
 ||||
 ||||
 ||||
+
+</details>
README_ZH.md (73 changed lines)

@@ -9,13 +9,13 @@
 [](https://opencollective.com/gitea "Become a backer/sponsor of gitea")
 [](https://opensource.org/licenses/MIT "License: MIT")
 [](https://gitpod.io/#https://github.com/go-gitea/gitea)
-[](https://crowdin.com/project/gitea "Crowdin")
+[](https://translate.gitea.com "Crowdin")
 
 [View this document in English](./README.md)
 
 ## 目标
 
-Gitea 的首要目标是创建一个极易安装,运行非常快速,安装和使用体验良好的自建 Git 服务。我们采用 Go 作为后端语言,这使我们只要生成一个可执行程序即可。并且他还支持跨平台,支持 Linux, macOS 和 Windows 以及各种架构,除了 x86,amd64,还包括 ARM 和 PowerPC。
+Gitea 的首要目标是创建一个极易安装,运行非常快速,安装和使用体验良好的自建 Git 服务。我们采用 Go 作为后端语言,这使我们只要生成一个可执行程序即可。并且他还支持跨平台,支持 Linux、macOS 和 Windows 以及各种架构,除了 x86 和 amd64,还包括 ARM 和 PowerPC。
 
 如果你想试用在线演示和报告问题,请访问 [demo.gitea.com](https://demo.gitea.com/)。
 
@@ -23,39 +23,80 @@ Gitea 的首要目标是创建一个极易安装,运行非常快速,安装
 
 如果你想在 Gitea Cloud 上快速部署你自己独享的 Gitea 实例,请访问 [cloud.gitea.com](https://cloud.gitea.com) 开始免费试用。
 
-## 提示
-
-1. **开始贡献代码之前请确保你已经看过了 [贡献者向导(英文)](CONTRIBUTING.md)**.
-2. 所有的安全问题,请私下发送邮件给 **security@gitea.io**。谢谢!
-3. 如果你要使用API,请参见 [API 文档](https://godoc.org/code.gitea.io/sdk/gitea).
-
 ## 文档
 
 关于如何安装请访问我们的 [文档站](https://docs.gitea.com/zh-cn/category/installation),如果没有找到对应的文档,你也可以通过 [Discord - 英文](https://discord.gg/gitea) 和 QQ群 328432459 来和我们交流。
 
-## 贡献流程
+## 编译
 
-Fork -> Patch -> Push -> Pull Request
+在源代码的根目录下执行:
 
+TAGS="bindata" make build
+
+或者如果需要SQLite支持:
+
+TAGS="bindata sqlite sqlite_unlock_notify" make build
+
+编译过程会分成2个子任务:
+
+- `make backend`,需要 [Go Stable](https://go.dev/dl/),最低版本需求可查看 [go.mod](/go.mod)。
+- `make frontend`,需要 [Node.js LTS](https://nodejs.org/en/download/) 或更高版本。
+
+你需要连接网络来下载 go 和 npm modules。当从 tar 格式的源文件编译时,其中包含了预编译的前端文件,因此 `make frontend` 将不会被执行。这允许编译时不需要 Node.js。
+
+更多信息: https://docs.gitea.com/installation/install-from-source
+
+## 使用
+
+编译之后,默认会在根目录下生成一个名为 `gitea` 的文件。你可以这样执行它:
+
+./gitea web
+
+> [!注意]
+> 如果你要使用API,请参见 [API 文档](https://godoc.org/code.gitea.io/sdk/gitea)。
+
+## 贡献
+
+贡献流程:Fork -> Patch -> Push -> Pull Request
+
+> [!注意]
+>
+> 1. **开始贡献代码之前请确保你已经看过了 [贡献者向导(英文)](CONTRIBUTING.md)**。
+> 2. 所有的安全问题,请私下发送邮件给 **security@gitea.io**。 谢谢!
 
 ## 翻译
 
-多语言翻译是基于Crowdin进行的.
-[](https://crowdin.com/project/gitea)
+[](https://translate.gitea.com)
+
+多语言翻译是基于Crowdin进行的。
+
+从 [文档](https://docs.gitea.com/contributing/localization) 中获取更多信息。
+
+## 官方和第三方项目
+
+Gitea 提供官方的 [go-sdk](https://gitea.com/gitea/go-sdk),以及名为 [tea](https://gitea.com/gitea/tea) 的 CLI 工具 和 用于 Gitea Action 的 [action runner](https://gitea.com/gitea/act_runner)。
+
+[gitea/awesome-gitea](https://gitea.com/gitea/awesome-gitea) 是一个 Gitea 相关项目的列表,你可以在这里找到更多的第三方项目,包括 SDK、插件、主题等等。
 
 ## 作者
 
-* [Maintainers](https://github.com/orgs/go-gitea/people)
-* [Contributors](https://github.com/go-gitea/gitea/graphs/contributors)
-* [Translators](options/locale/TRANSLATORS)
+- [Maintainers](https://github.com/orgs/go-gitea/people)
+- [Contributors](https://github.com/go-gitea/gitea/graphs/contributors)
+- [Translators](options/locale/TRANSLATORS)
 
 ## 授权许可
 
 本项目采用 MIT 开源授权许可证,完整的授权说明已放置在 [LICENSE](https://github.com/go-gitea/gitea/blob/main/LICENSE) 文件中。
 
-## 截图
+## 更多信息
 
+<details>
+<summary>截图</summary>
+
 ||||
 |:---:|:---:|:---:|
 ||||
 ||||
 ||||
+
+</details>
assets/go-licenses.json (generated, 5 changed lines)

@@ -744,11 +744,6 @@
     "path": "github.com/kevinburke/ssh_config/LICENSE",
     "licenseText": "Copyright (c) 2017 Kevin Burke.\n\nPermission is hereby granted, free of charge, to any person\nobtaining a copy of this software and associated documentation\nfiles (the \"Software\"), to deal in the Software without\nrestriction, including without limitation the rights to use,\ncopy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following\nconditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\nOF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\nHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\nWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\nOTHER DEALINGS IN THE SOFTWARE.\n\n===================\n\nThe lexer and parser borrow heavily from github.com/pelletier/go-toml. The\nlicense for that project is copied below.\n\nThe MIT License (MIT)\n\nCopyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
   },
-  {
-    "name": "github.com/keybase/go-crypto",
-    "path": "github.com/keybase/go-crypto/LICENSE",
-    "licenseText": "Copyright (c) 2009 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
-  },
   {
     "name": "github.com/klauspost/compress",
     "path": "github.com/klauspost/compress/LICENSE",
@@ -31,6 +31,11 @@ var microcmdUserCreate = &cli.Command{
 			Name:  "username",
 			Usage: "Username",
 		},
+		&cli.StringFlag{
+			Name:  "user-type",
+			Usage: "Set user's type: individual or bot",
+			Value: "individual",
+		},
 		&cli.StringFlag{
 			Name:  "password",
 			Usage: "User password",
@@ -77,6 +82,22 @@ func runCreateUser(c *cli.Context) error {
 		return err
 	}
 
+	userTypes := map[string]user_model.UserType{
+		"individual": user_model.UserTypeIndividual,
+		"bot":        user_model.UserTypeBot,
+	}
+	userType, ok := userTypes[c.String("user-type")]
+	if !ok {
+		return fmt.Errorf("invalid user type: %s", c.String("user-type"))
+	}
+	if userType != user_model.UserTypeIndividual {
+		// Some other commands like "change-password" also only support individual users.
+		// It needs to clarify the "password" behavior for bot users in the future.
+		// At the moment, we do not allow setting password for bot users.
+		if c.IsSet("password") || c.IsSet("random-password") {
+			return errors.New("password can only be set for individual users")
+		}
+	}
 	if c.IsSet("name") && c.IsSet("username") {
 		return errors.New("cannot set both --name and --username flags")
 	}
@@ -118,16 +139,19 @@ func runCreateUser(c *cli.Context) error {
 			return err
 		}
 		fmt.Printf("generated random password is '%s'\n", password)
-	} else {
+	} else if userType == user_model.UserTypeIndividual {
 		return errors.New("must set either password or random-password flag")
 	}
 
 	isAdmin := c.Bool("admin")
 	mustChangePassword := true // always default to true
 	if c.IsSet("must-change-password") {
+		if userType != user_model.UserTypeIndividual {
+			return errors.New("must-change-password flag can only be set for individual users")
+		}
 		// if the flag is set, use the value provided by the user
 		mustChangePassword = c.Bool("must-change-password")
-	} else {
+	} else if userType == user_model.UserTypeIndividual {
 		// check whether there are users in the database
 		hasUserRecord, err := db.IsTableNotEmpty(&user_model.User{})
 		if err != nil {
@@ -151,8 +175,9 @@ func runCreateUser(c *cli.Context) error {
 	u := &user_model.User{
 		Name:               username,
 		Email:              c.String("email"),
-		Passwd:             password,
 		IsAdmin:            isAdmin,
+		Type:               userType,
+		Passwd:             password,
 		MustChangePassword: mustChangePassword,
 		Visibility:         visibility,
 	}
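Note: as a rough standalone sketch of the lookup-and-validate pattern these hunks apply to the new --user-type flag (the type and helper names below are illustrative stand-ins, not the actual Gitea identifiers):

package main

import (
	"errors"
	"fmt"
)

// UserType stands in for user_model.UserType; the values are illustrative.
type UserType int

const (
	UserTypeIndividual UserType = iota
	UserTypeBot
)

// resolveUserType maps the CLI flag value onto a UserType, rejecting unknown input.
func resolveUserType(s string) (UserType, error) {
	types := map[string]UserType{
		"individual": UserTypeIndividual,
		"bot":        UserTypeBot,
	}
	t, ok := types[s]
	if !ok {
		return 0, fmt.Errorf("invalid user type: %s", s)
	}
	return t, nil
}

// checkPasswordFlags mirrors the added guard: password flags only apply to individuals.
func checkPasswordFlags(t UserType, passwordSet, randomPasswordSet bool) error {
	if t != UserTypeIndividual && (passwordSet || randomPasswordSet) {
		return errors.New("password can only be set for individual users")
	}
	return nil
}

func main() {
	t, err := resolveUserType("bot")
	if err != nil {
		fmt.Println(err)
		return
	}
	fmt.Println(checkPasswordFlags(t, true, false)) // password can only be set for individual users
}

Unknown values fail fast, and password-related flags are rejected for anything other than an individual account, which is the same shape as the checks added to runCreateUser above.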
@@ -13,32 +13,54 @@ import (
 	user_model "code.gitea.io/gitea/models/user"
 
 	"github.com/stretchr/testify/assert"
+	"github.com/stretchr/testify/require"
 )
 
 func TestAdminUserCreate(t *testing.T) {
 	app := NewMainApp(AppVersion{})
 
 	reset := func() {
-		assert.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.User{}))
-		assert.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.EmailAddress{}))
+		require.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.User{}))
+		require.NoError(t, db.TruncateBeans(db.DefaultContext, &user_model.EmailAddress{}))
 	}
 
-	type createCheck struct{ IsAdmin, MustChangePassword bool }
-	createUser := func(name, args string) createCheck {
-		assert.NoError(t, app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s --password foobar", name, name, args))))
-		u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: name})
-		return createCheck{u.IsAdmin, u.MustChangePassword}
-	}
-	reset()
-	assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: false}, createUser("u", ""), "first non-admin user doesn't need to change password")
+	t.Run("MustChangePassword", func(t *testing.T) {
+		type check struct {
+			IsAdmin            bool
+			MustChangePassword bool
+		}
+		createCheck := func(name, args string) check {
+			require.NoError(t, app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s --password foobar", name, name, args))))
+			u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: name})
+			return check{IsAdmin: u.IsAdmin, MustChangePassword: u.MustChangePassword}
+		}
+		reset()
+		assert.Equal(t, check{IsAdmin: false, MustChangePassword: false}, createCheck("u", ""), "first non-admin user doesn't need to change password")
 
 	reset()
-	assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: false}, createUser("u", "--admin"), "first admin user doesn't need to change password")
+		assert.Equal(t, check{IsAdmin: true, MustChangePassword: false}, createCheck("u", "--admin"), "first admin user doesn't need to change password")
 
 	reset()
-	assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: true}, createUser("u", "--admin --must-change-password"))
-	assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: true}, createUser("u2", "--admin"))
-	assert.Equal(t, createCheck{IsAdmin: true, MustChangePassword: false}, createUser("u3", "--admin --must-change-password=false"))
-	assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: true}, createUser("u4", ""))
-	assert.Equal(t, createCheck{IsAdmin: false, MustChangePassword: false}, createUser("u5", "--must-change-password=false"))
+		assert.Equal(t, check{IsAdmin: true, MustChangePassword: true}, createCheck("u", "--admin --must-change-password"))
+		assert.Equal(t, check{IsAdmin: true, MustChangePassword: true}, createCheck("u2", "--admin"))
+		assert.Equal(t, check{IsAdmin: true, MustChangePassword: false}, createCheck("u3", "--admin --must-change-password=false"))
+		assert.Equal(t, check{IsAdmin: false, MustChangePassword: true}, createCheck("u4", ""))
+		assert.Equal(t, check{IsAdmin: false, MustChangePassword: false}, createCheck("u5", "--must-change-password=false"))
+	})
 
+	t.Run("UserType", func(t *testing.T) {
+		createUser := func(name, args string) error {
+			return app.Run(strings.Fields(fmt.Sprintf("./gitea admin user create --username %s --email %s@gitea.local %s", name, name, args)))
+		}
+
+		reset()
+		assert.ErrorContains(t, createUser("u", "--user-type invalid"), "invalid user type")
+		assert.ErrorContains(t, createUser("u", "--user-type bot --password 123"), "can only be set for individual users")
+		assert.ErrorContains(t, createUser("u", "--user-type bot --must-change-password"), "can only be set for individual users")
+
+		assert.NoError(t, createUser("u", "--user-type bot"))
+		u := unittest.AssertExistsAndLoadBean(t, &user_model.User{LowerName: "u"})
+		assert.Equal(t, user_model.UserTypeBot, u.Type)
+		assert.Equal(t, "", u.Passwd)
+	})
 }
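Note: the test above is reorganized into t.Run subtests that share one reset helper and keep their own small closure helpers. A minimal sketch of that shape (purely illustrative, unrelated to the Gitea fixtures):

package demo

import "testing"

// TestPattern shows the structure used above: a shared reset helper plus
// per-topic t.Run subtests, each with its own small closure helper.
func TestPattern(t *testing.T) {
	counter := 0
	reset := func() { counter = 0 }

	t.Run("Add", func(t *testing.T) {
		add := func(n int) int { counter += n; return counter }
		reset()
		if got := add(2); got != 2 {
			t.Fatalf("got %d, want 2", got)
		}
	})

	t.Run("Reset", func(t *testing.T) {
		reset()
		if counter != 0 {
			t.Fatalf("got %d, want 0", counter)
		}
	})
}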
@@ -196,7 +196,7 @@ func migrateActionsLog(ctx context.Context, dstStorage storage.ObjectStorage) er
 
 func migrateActionsArtifacts(ctx context.Context, dstStorage storage.ObjectStorage) error {
 	return db.Iterate(ctx, nil, func(ctx context.Context, artifact *actions_model.ActionArtifact) error {
-		if artifact.Status == int64(actions_model.ArtifactStatusExpired) {
+		if artifact.Status == actions_model.ArtifactStatusExpired {
 			return nil
 		}
 
@@ -104,7 +104,10 @@ func fail(ctx context.Context, userMessage, logMsgFmt string, args ...any) error
 	// There appears to be a chance to cause a zombie process and failure to read the Exit status
 	// if nothing is outputted on stdout.
 	_, _ = fmt.Fprintln(os.Stdout, "")
-	_, _ = fmt.Fprintln(os.Stderr, "Gitea:", userMessage)
+	// add extra empty lines to separate our message from other git errors to get more attention
+	_, _ = fmt.Fprintln(os.Stderr, "error:")
+	_, _ = fmt.Fprintln(os.Stderr, "error:", userMessage)
+	_, _ = fmt.Fprintln(os.Stderr, "error:")
 
 	if logMsgFmt != "" {
 		logMsg := fmt.Sprintf(logMsgFmt, args...)
@@ -18,10 +18,12 @@ import (
 
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/graceful"
+	"code.gitea.io/gitea/modules/gtprof"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/process"
 	"code.gitea.io/gitea/modules/public"
 	"code.gitea.io/gitea/modules/setting"
+	"code.gitea.io/gitea/modules/util"
 	"code.gitea.io/gitea/routers"
 	"code.gitea.io/gitea/routers/install"
 
@@ -218,6 +220,8 @@ func serveInstalled(ctx *cli.Context) error {
 		}
 	}
 
+	gtprof.EnableBuiltinTracer(util.Iif(setting.IsProd, 2000*time.Millisecond, 100*time.Millisecond))
+
 	// Set up Chi routes
 	webRoutes := routers.NormalRoutes()
 	err := listen(webRoutes, true)
@@ -790,10 +790,13 @@ LEVEL = Info
 ;; Please note that setting this to false will not disable OAuth Basic or Basic authentication using a token
 ;ENABLE_BASIC_AUTHENTICATION = true
 ;;
-;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 login methods.
+;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 or passkey login methods if they are enabled.
 ;; If you set it to false, maybe it also needs to set ENABLE_BASIC_AUTHENTICATION to false to completely disable password-based authentication.
 ;ENABLE_PASSWORD_SIGNIN_FORM = true
 ;;
+;; Allow users to sign-in with a passkey
+;ENABLE_PASSKEY_AUTHENTICATION = true
+;;
 ;; More detail: https://github.com/gogits/gogs/issues/165
 ;ENABLE_REVERSE_PROXY_AUTHENTICATION = false
 ; Enable this to allow reverse proxy authentication for API requests, the reverse proxy is responsible for ensuring that no CSRF is possible.
@@ -1126,6 +1129,9 @@ LEVEL = Info
 ;; In default merge messages only include approvers who are official
 ;DEFAULT_MERGE_MESSAGE_OFFICIAL_APPROVERS_ONLY = true
 ;;
+;; In default squash-merge messages include the commit message of all commits comprising the pull request.
+;POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES = false
+;;
 ;; Add co-authored-by and co-committed-by trailers if committer does not match author
 ;ADD_CO_COMMITTER_TRAILERS = true
 ;;
flake.lock | 12 (generated)
@@ -5,11 +5,11 @@
         "systems": "systems"
       },
       "locked": {
-        "lastModified": 1726560853,
-        "narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
+        "lastModified": 1731533236,
+        "narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
         "owner": "numtide",
         "repo": "flake-utils",
-        "rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
+        "rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
         "type": "github"
       },
       "original": {
@@ -20,11 +20,11 @@
     },
     "nixpkgs": {
       "locked": {
-        "lastModified": 1731139594,
-        "narHash": "sha256-IigrKK3vYRpUu+HEjPL/phrfh7Ox881er1UEsZvw9Q4=",
+        "lastModified": 1739214665,
+        "narHash": "sha256-26L8VAu3/1YRxS8MHgBOyOM8xALdo6N0I04PgorE7UM=",
         "owner": "nixos",
         "repo": "nixpkgs",
-        "rev": "76612b17c0ce71689921ca12d9ffdc9c23ce40b2",
+        "rev": "64e75cd44acf21c7933d61d7721e812eac1b5a0a",
         "type": "github"
       },
       "original": {
@@ -29,9 +29,14 @@
             poetry
 
             # backend
+            go_1_24
             gofumpt
             sqlite
           ];
+          shellHook = ''
+            export GO="${pkgs.go_1_24}/bin/go"
+            export GOROOT="${pkgs.go_1_24}/share/go"
+          '';
         };
       }
     );
go.mod | 3
@@ -1,6 +1,6 @@
 module code.gitea.io/gitea
 
-go 1.23
+go 1.24
 
 // rfc5280 said: "The serial number is an integer assigned by the CA to each certificate."
 // But some CAs use negative serial number, just relax the check. related:
@@ -78,7 +78,6 @@ require (
 	github.com/jhillyerd/enmime v1.3.0
 	github.com/json-iterator/go v1.1.12
 	github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
-	github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4
 	github.com/klauspost/compress v1.17.11
 	github.com/klauspost/cpuid/v2 v2.2.8
 	github.com/lib/pq v1.10.9
go.sum | 2
@@ -506,8 +506,6 @@ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNU
 github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
 github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
 github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
-github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 h1:cTxwSmnaqLoo+4tLukHoB9iqHOu3LmLhRmgUxZo6Vp4=
-github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M=
 github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
 github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
 github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
main_timezones.go | 16 (new file)
@@ -0,0 +1,16 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+//go:build windows
+
+package main
+
+// Golang has the ability to load OS's timezone data from most UNIX systems (https://github.com/golang/go/blob/master/src/time/zoneinfo_unix.go)
+// Even if the timezone data is missing, users could install the related packages to get it.
+// But on Windows, although `zoneinfo_windows.go` tries to load the timezone data from Windows registry,
+// some users still suffer from the issue that the timezone data is missing: https://github.com/go-gitea/gitea/issues/33235
+// So we import the tzdata package to make sure the timezone data is included in the binary.
+//
+// For non-Windows package builders, they could still use the "TAGS=timetzdata" to include the tzdata package in the binary.
+// If we decided to add the tzdata for other platforms, modify the "go:build" directive above.
+import _ "time/tzdata"
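Note: the comments above explain the intent: with the blank time/tzdata import, the IANA time zone database is compiled into the binary, so time.LoadLocation no longer depends on OS zoneinfo files. A minimal standalone sketch demonstrating that effect (not Gitea code):

package main

import (
	"fmt"
	"time"

	// Embeds the IANA time zone database into the binary, so LoadLocation
	// works even when the operating system ships no usable zoneinfo, which
	// is the Windows problem the new file addresses.
	_ "time/tzdata"
)

func main() {
	loc, err := time.LoadLocation("Europe/Berlin")
	if err != nil {
		panic(err)
	}
	fmt.Println(time.Now().In(loc).Format(time.RFC3339))
}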
@@ -48,7 +48,7 @@ type ActionArtifact struct {
 	ContentEncoding string             // The content encoding of the artifact
 	ArtifactPath    string             `xorm:"index unique(runid_name_path)"` // The path to the artifact when runner uploads it
 	ArtifactName    string             `xorm:"index unique(runid_name_path)"` // The name of the artifact when runner uploads it
-	Status          int64              `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
+	Status          ArtifactStatus     `xorm:"index"` // The status of the artifact, uploading, expired or need-delete
 	CreatedUnix     timeutil.TimeStamp `xorm:"created"`
 	UpdatedUnix     timeutil.TimeStamp `xorm:"updated index"`
 	ExpiredUnix     timeutil.TimeStamp `xorm:"index"` // The time when the artifact will be expired
@@ -68,7 +68,7 @@ func CreateArtifact(ctx context.Context, t *ActionTask, artifactName, artifactPa
 		RepoID:      t.RepoID,
 		OwnerID:     t.OwnerID,
 		CommitSHA:   t.CommitSHA,
-		Status:      int64(ArtifactStatusUploadPending),
+		Status:      ArtifactStatusUploadPending,
 		ExpiredUnix: timeutil.TimeStamp(time.Now().Unix() + timeutil.Day*expiredDays),
 	}
 	if _, err := db.GetEngine(ctx).Insert(artifact); err != nil {
@@ -108,12 +108,19 @@ func UpdateArtifactByID(ctx context.Context, id int64, art *ActionArtifact) erro
 
 type FindArtifactsOptions struct {
 	db.ListOptions
 	RepoID       int64
 	RunID        int64
 	ArtifactName string
 	Status       int
+	FinalizedArtifactsV4 bool
 }
+
+func (opts FindArtifactsOptions) ToOrders() string {
+	return "id"
+}
+
+var _ db.FindOptionsOrder = (*FindArtifactsOptions)(nil)
 
 func (opts FindArtifactsOptions) ToConds() builder.Cond {
 	cond := builder.NewCond()
 	if opts.RepoID > 0 {
@@ -128,11 +135,15 @@ func (opts FindArtifactsOptions) ToConds() builder.Cond {
 	if opts.Status > 0 {
 		cond = cond.And(builder.Eq{"status": opts.Status})
 	}
+	if opts.FinalizedArtifactsV4 {
+		cond = cond.And(builder.Eq{"status": ArtifactStatusUploadConfirmed}.Or(builder.Eq{"status": ArtifactStatusExpired}))
+		cond = cond.And(builder.Eq{"content_encoding": "application/zip"})
+	}
 
 	return cond
 }
 
-// ActionArtifactMeta is the meta data of an artifact
+// ActionArtifactMeta is the meta-data of an artifact
 type ActionArtifactMeta struct {
 	ArtifactName string
 	FileSize     int64
@@ -166,18 +177,18 @@ func ListPendingDeleteArtifacts(ctx context.Context, limit int) ([]*ActionArtifa
 
 // SetArtifactExpired sets an artifact to expired
 func SetArtifactExpired(ctx context.Context, artifactID int64) error {
-	_, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusExpired)})
+	_, err := db.GetEngine(ctx).Where("id=? AND status = ?", artifactID, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusExpired})
 	return err
 }
 
 // SetArtifactNeedDelete sets an artifact to need-delete, cron job will delete it
 func SetArtifactNeedDelete(ctx context.Context, runID int64, name string) error {
-	_, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusPendingDeletion)})
+	_, err := db.GetEngine(ctx).Where("run_id=? AND artifact_name=? AND status = ?", runID, name, ArtifactStatusUploadConfirmed).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusPendingDeletion})
 	return err
 }
 
 // SetArtifactDeleted sets an artifact to deleted
 func SetArtifactDeleted(ctx context.Context, artifactID int64) error {
-	_, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: int64(ArtifactStatusDeleted)})
+	_, err := db.GetEngine(ctx).ID(artifactID).Cols("status").Update(&ActionArtifact{Status: ArtifactStatusDeleted})
 	return err
 }
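Note: these hunks replace the raw int64 status column with the ArtifactStatus type, which is why the int64(...) conversions disappear at every call site. A small standalone sketch of that typed-constant pattern (illustrative names, not the real model):

package main

import "fmt"

// ArtifactStatus stands in for the typed status above; the int64 base type
// keeps it storable as a plain column while letting call sites compare
// against named constants without conversions.
type ArtifactStatus int64

const (
	StatusUploadPending ArtifactStatus = iota + 1
	StatusUploadConfirmed
	StatusExpired
)

type Artifact struct {
	Status ArtifactStatus
}

func main() {
	a := Artifact{Status: StatusExpired}
	fmt.Println(a.Status == StatusExpired) // no int64(...) cast needed on either side
}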
@@ -88,7 +88,7 @@ func (run *ActionRun) RefLink() string {
 	if refName.IsPull() {
 		return run.Repo.Link() + "/pulls/" + refName.ShortName()
 	}
-	return git.RefURL(run.Repo.Link(), run.Ref)
+	return run.Repo.Link() + "/src/" + refName.RefWebLinkPath()
 }
 
 // PrettyRef return #id for pull ref or ShortName for others
@@ -167,6 +167,7 @@ func init() {
 
 type FindRunnerOptions struct {
 	db.ListOptions
+	IDs     []int64
 	RepoID  int64
 	OwnerID int64 // it will be ignored if RepoID is set
 	Sort    string
@@ -178,6 +179,14 @@ type FindRunnerOptions struct {
 func (opts FindRunnerOptions) ToConds() builder.Cond {
 	cond := builder.NewCond()
 
+	if len(opts.IDs) > 0 {
+		if len(opts.IDs) == 1 {
+			cond = cond.And(builder.Eq{"id": opts.IDs[0]})
+		} else {
+			cond = cond.And(builder.In("id", opts.IDs))
+		}
+	}
+
 	if opts.RepoID > 0 {
 		c := builder.NewCond().And(builder.Eq{"repo_id": opts.RepoID})
 		if opts.WithAvailable {
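Note: the new IDs filter uses builder.Eq for a single ID and builder.In otherwise (the variables filter below follows the same pattern). A rough sketch of how such conditions render to SQL with xorm.io/builder; the column name and values here are made up for illustration:

package main

import (
	"fmt"

	"xorm.io/builder"
)

// idCond reproduces the shape of the added filter: a plain equality for a
// single ID, an IN clause otherwise.
func idCond(ids []int64) builder.Cond {
	cond := builder.NewCond()
	if len(ids) == 1 {
		return cond.And(builder.Eq{"id": ids[0]})
	}
	return cond.And(builder.In("id", ids))
}

func main() {
	sql, args, err := builder.ToSQL(idCond([]int64{3, 5, 8}))
	if err != nil {
		panic(err)
	}
	fmt.Println(sql, args) // e.g. "id IN (?,?,?)" [3 5 8]
}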
@@ -58,6 +58,7 @@ func InsertVariable(ctx context.Context, ownerID, repoID int64, name, data strin
 
 type FindVariablesOpts struct {
 	db.ListOptions
+	IDs     []int64
 	RepoID  int64
 	OwnerID int64 // it will be ignored if RepoID is set
 	Name    string
@@ -65,6 +66,15 @@ type FindVariablesOpts struct {
 
 func (opts FindVariablesOpts) ToConds() builder.Cond {
 	cond := builder.NewCond()
+
+	if len(opts.IDs) > 0 {
+		if len(opts.IDs) == 1 {
+			cond = cond.And(builder.Eq{"id": opts.IDs[0]})
+		} else {
+			cond = cond.And(builder.In("id", opts.IDs))
+		}
+	}
+
 	// Since we now support instance-level variables,
 	// there is no need to check for null values for `owner_id` and `repo_id`
 	cond = cond.And(builder.Eq{"repo_id": opts.RepoID})
@@ -85,12 +95,12 @@ func FindVariables(ctx context.Context, opts FindVariablesOpts) ([]*ActionVariab
 	return db.Find[ActionVariable](ctx, opts)
 }
 
-func UpdateVariable(ctx context.Context, variable *ActionVariable) (bool, error) {
-	count, err := db.GetEngine(ctx).ID(variable.ID).Cols("name", "data").
-		Update(&ActionVariable{
-			Name: variable.Name,
-			Data: variable.Data,
-		})
+func UpdateVariableCols(ctx context.Context, variable *ActionVariable, cols ...string) (bool, error) {
+	variable.Name = strings.ToUpper(variable.Name)
+	count, err := db.GetEngine(ctx).
+		ID(variable.ID).
+		Cols(cols...).
+		Update(variable)
 	return count != 0, err
 }
 
@@ -72,9 +72,9 @@ func (at ActionType) String() string {
 	case ActionRenameRepo:
 		return "rename_repo"
 	case ActionStarRepo:
-		return "star_repo"
+		return "star_repo" // will not displayed in feeds.tmpl
 	case ActionWatchRepo:
-		return "watch_repo"
+		return "watch_repo" // will not displayed in feeds.tmpl
 	case ActionCommitRepo:
 		return "commit_repo"
 	case ActionCreateIssue:
@@ -355,7 +355,7 @@ func (a *Action) GetBranch() string {
 
 // GetRefLink returns the action's ref link.
 func (a *Action) GetRefLink(ctx context.Context) string {
-	return git.RefURL(a.GetRepoLink(ctx), a.RefName)
+	return a.GetRepoLink(ctx) + "/src/" + git.RefName(a.RefName).RefWebLinkPath()
 }
 
 // GetTag returns the action's repository tag.
@@ -13,8 +13,8 @@ import (
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/timeutil"
 
-	"github.com/keybase/go-crypto/openpgp"
-	"github.com/keybase/go-crypto/openpgp/packet"
+	"github.com/ProtonMail/go-crypto/openpgp"
+	"github.com/ProtonMail/go-crypto/openpgp/packet"
 	"xorm.io/builder"
 )
 
@@ -106,7 +106,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) {
 	if err != nil {
 		return nil, err
 	}
-	keys, err := checkArmoredGPGKeyString(impKey.Content)
+	keys, err := CheckArmoredGPGKeyString(impKey.Content)
 	if err != nil {
 		return nil, err
 	}
@@ -115,7 +115,7 @@ func GPGKeyToEntity(ctx context.Context, k *GPGKey) (*openpgp.Entity, error) {
 
 // parseSubGPGKey parse a sub Key
 func parseSubGPGKey(ownerID int64, primaryID string, pubkey *packet.PublicKey, expiry time.Time) (*GPGKey, error) {
-	content, err := base64EncPubKey(pubkey)
+	content, err := Base64EncPubKey(pubkey)
 	if err != nil {
 		return nil, err
 	}
@@ -141,7 +141,11 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
 	// Parse Subkeys
 	subkeys := make([]*GPGKey, len(e.Subkeys))
 	for i, k := range e.Subkeys {
-		subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, expiry)
+		subkeyExpiry := expiry
+		if k.Sig.KeyLifetimeSecs != nil {
+			subkeyExpiry = k.PublicKey.CreationTime.Add(time.Duration(*k.Sig.KeyLifetimeSecs) * time.Second)
+		}
+		subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, subkeyExpiry)
 		if err != nil {
 			return nil, ErrGPGKeyParsing{ParseError: err}
 		}
@@ -156,7 +160,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
 
 	emails := make([]*user_model.EmailAddress, 0, len(e.Identities))
 	for _, ident := range e.Identities {
-		if ident.Revocation != nil {
+		if ident.Revoked(time.Now()) {
 			continue
 		}
 		email := strings.ToLower(strings.TrimSpace(ident.UserId.Email))
@@ -179,7 +183,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
 		}
 	}
 
-	content, err := base64EncPubKey(pubkey)
+	content, err := Base64EncPubKey(pubkey)
 	if err != nil {
 		return nil, err
 	}
@@ -235,33 +239,3 @@ func DeleteGPGKey(ctx context.Context, doer *user_model.User, id int64) (err err
 
 	return committer.Commit()
 }
-
-func checkKeyEmails(ctx context.Context, email string, keys ...*GPGKey) (bool, string) {
-	uid := int64(0)
-	var userEmails []*user_model.EmailAddress
-	var user *user_model.User
-	for _, key := range keys {
-		for _, e := range key.Emails {
-			if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
-				return true, e.Email
-			}
-		}
-		if key.Verified && key.OwnerID != 0 {
-			if uid != key.OwnerID {
-				userEmails, _ = user_model.GetEmailAddresses(ctx, key.OwnerID)
-				uid = key.OwnerID
-				user = &user_model.User{ID: uid}
-				_, _ = user_model.GetUser(ctx, user)
-			}
-			for _, e := range userEmails {
-				if e.IsActivated && (email == "" || strings.EqualFold(e.Email, email)) {
-					return true, e.Email
-				}
-			}
-			if user.KeepEmailPrivate && strings.EqualFold(email, user.GetEmail()) {
-				return true, user.GetEmail()
-			}
-		}
-	}
-	return false, email
-}
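Note: the parseGPGKey hunk derives a per-subkey expiry from the subkey's own binding signature instead of reusing the primary key's expiry. A rough standalone sketch of that calculation against the ProtonMail/go-crypto API; the generated throwaway key exists only so there is something to inspect, while Gitea parses the user's uploaded armored key:

package main

import (
	"fmt"
	"time"

	"github.com/ProtonMail/go-crypto/openpgp"
)

func main() {
	e, err := openpgp.NewEntity("demo", "", "demo@example.com", nil)
	if err != nil {
		panic(err)
	}
	primaryExpiry := time.Time{} // zero value used here to mean "never expires"
	for _, sub := range e.Subkeys {
		// A subkey's lifetime comes from its own binding signature; fall back
		// to the primary key's expiry when none is set.
		expiry := primaryExpiry
		if sub.Sig.KeyLifetimeSecs != nil {
			expiry = sub.PublicKey.CreationTime.Add(time.Duration(*sub.Sig.KeyLifetimeSecs) * time.Second)
		}
		fmt.Printf("subkey %s expiry: %v\n", sub.PublicKey.KeyIdString(), expiry)
	}
}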
@@ -10,7 +10,7 @@ import (
 	"code.gitea.io/gitea/models/db"
 	"code.gitea.io/gitea/modules/log"
 
-	"github.com/keybase/go-crypto/openpgp"
+	"github.com/ProtonMail/go-crypto/openpgp"
 )
 
 // __________________ ________ ____ __.
@@ -67,7 +67,7 @@ func addGPGSubKey(ctx context.Context, key *GPGKey) (err error) {
 
 // AddGPGKey adds new public key to database.
 func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature string) ([]*GPGKey, error) {
-	ekeys, err := checkArmoredGPGKeyString(content)
+	ekeys, err := CheckArmoredGPGKeyString(content)
 	if err != nil {
 		return nil, err
 	}
@@ -83,12 +83,12 @@ func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature str
 	verified := false
 	// Handle provided signature
 	if signature != "" {
-		signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature))
+		signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature), nil)
 		if err != nil {
-			signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature))
+			signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature), nil)
 		}
 		if err != nil {
-			signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature))
+			signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature), nil)
 		}
 		if err != nil {
 			log.Error("Unable to validate token signature. Error: %v", err)
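Note: the CheckArmoredDetachedSignature calls above gain a trailing *packet.Config argument in the ProtonMail fork; passing nil keeps the defaults. A rough self-contained sketch of that call shape; the generated key and token text are placeholders, not Gitea's actual token flow:

package main

import (
	"bytes"
	"fmt"
	"strings"

	"github.com/ProtonMail/go-crypto/openpgp"
)

func main() {
	// Throwaway key pair, only so the sketch can sign and verify something.
	signer, err := openpgp.NewEntity("demo", "", "demo@example.com", nil)
	if err != nil {
		panic(err)
	}

	token := "example-token"
	var sig bytes.Buffer
	if err := openpgp.ArmoredDetachSign(&sig, signer, strings.NewReader(token), nil); err != nil {
		panic(err)
	}

	// Fourth argument is the *packet.Config; nil keeps the library defaults,
	// mirroring the updated AddGPGKey calls.
	verified, err := openpgp.CheckArmoredDetachedSignature(
		openpgp.EntityList{signer}, strings.NewReader(token), &sig, nil)
	if err != nil {
		panic(err)
	}
	fmt.Println("signed by key", verified.PrimaryKey.KeyIdString())
}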
@@ -4,19 +4,14 @@
 package asymkey
 
 import (
-	"context"
 	"fmt"
 	"hash"
-	"strings"
 
-	"code.gitea.io/gitea/models/db"
 	repo_model "code.gitea.io/gitea/models/repo"
 	user_model "code.gitea.io/gitea/models/user"
-	"code.gitea.io/gitea/modules/git"
 	"code.gitea.io/gitea/modules/log"
-	"code.gitea.io/gitea/modules/setting"
 
-	"github.com/keybase/go-crypto/openpgp/packet"
+	"github.com/ProtonMail/go-crypto/openpgp/packet"
 )
 
 // __________________ ________ ____ __.
@@ -70,263 +65,6 @@ const (
 	NoKeyFound = "gpg.error.no_gpg_keys_found"
 )
 
-// ParseCommitsWithSignature checks if signaute of commits are corresponding to users gpg keys.
-func ParseCommitsWithSignature(ctx context.Context, oldCommits []*user_model.UserCommit, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error)) []*SignCommit {
-	newCommits := make([]*SignCommit, 0, len(oldCommits))
-	keyMap := map[string]bool{}
-
-	for _, c := range oldCommits {
-		signCommit := &SignCommit{
-			UserCommit:   c,
-			Verification: ParseCommitWithSignature(ctx, c.Commit),
-		}
-
-		_ = CalculateTrustStatus(signCommit.Verification, repoTrustModel, isOwnerMemberCollaborator, &keyMap)
-
-		newCommits = append(newCommits, signCommit)
-	}
-	return newCommits
-}
-
-// ParseCommitWithSignature check if signature is good against keystore.
-func ParseCommitWithSignature(ctx context.Context, c *git.Commit) *CommitVerification {
-	var committer *user_model.User
-	if c.Committer != nil {
-		var err error
-		// Find Committer account
-		committer, err = user_model.GetUserByEmail(ctx, c.Committer.Email) // This finds the user by primary email or activated email so commit will not be valid if email is not
-		if err != nil { // Skipping not user for committer
-			committer = &user_model.User{
-				Name:  c.Committer.Name,
-				Email: c.Committer.Email,
-			}
-			// We can expect this to often be an ErrUserNotExist. in the case
-			// it is not, however, it is important to log it.
-			if !user_model.IsErrUserNotExist(err) {
-				log.Error("GetUserByEmail: %v", err)
-				return &CommitVerification{
-					CommittingUser: committer,
-					Verified:       false,
-					Reason:         "gpg.error.no_committer_account",
-				}
-			}
-		}
-	}
-
-	// If no signature just report the committer
-	if c.Signature == nil {
-		return &CommitVerification{
-			CommittingUser: committer,
-			Verified:       false,                         // Default value
-			Reason:         "gpg.error.not_signed_commit", // Default value
-		}
-	}
-
-	// If this a SSH signature handle it differently
-	if strings.HasPrefix(c.Signature.Signature, "-----BEGIN SSH SIGNATURE-----") {
-		return ParseCommitWithSSHSignature(ctx, c, committer)
-	}
-
-	// Parsing signature
-	sig, err := extractSignature(c.Signature.Signature)
-	if err != nil { // Skipping failed to extract sign
-		log.Error("SignatureRead err: %v", err)
-		return &CommitVerification{
-			CommittingUser: committer,
-			Verified:       false,
-			Reason:         "gpg.error.extract_sign",
-		}
-	}
-
-	keyID := tryGetKeyIDFromSignature(sig)
-	defaultReason := NoKeyFound
-
-	// First check if the sig has a keyID and if so just look at that
-	if commitVerification := hashAndVerifyForKeyID(
-		ctx,
-		sig,
-		c.Signature.Payload,
-		committer,
-		keyID,
-		setting.AppName,
-		""); commitVerification != nil {
-		if commitVerification.Reason == BadSignature {
-			defaultReason = BadSignature
-		} else {
-			return commitVerification
-		}
-	}
-
-	// Now try to associate the signature with the committer, if present
-	if committer.ID != 0 {
-		keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{
-			OwnerID: committer.ID,
-		})
-		if err != nil { // Skipping failed to get gpg keys of user
-			log.Error("ListGPGKeys: %v", err)
-			return &CommitVerification{
-				CommittingUser: committer,
-				Verified:       false,
-				Reason:         "gpg.error.failed_retrieval_gpg_keys",
-			}
-		}
-
-		if err := GPGKeyList(keys).LoadSubKeys(ctx); err != nil {
-			log.Error("LoadSubKeys: %v", err)
-			return &CommitVerification{
-				CommittingUser: committer,
-				Verified:       false,
-				Reason:         "gpg.error.failed_retrieval_gpg_keys",
-			}
-		}
-
-		committerEmailAddresses, _ := user_model.GetEmailAddresses(ctx, committer.ID)
-		activated := false
-		for _, e := range committerEmailAddresses {
-			if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
-				activated = true
-				break
-			}
-		}
-
-		for _, k := range keys {
-			// Pre-check (& optimization) that emails attached to key can be attached to the committer email and can validate
-			canValidate := false
-			email := ""
-			if k.Verified && activated {
-				canValidate = true
-				email = c.Committer.Email
-			}
-			if !canValidate {
-				for _, e := range k.Emails {
-					if e.IsActivated && strings.EqualFold(e.Email, c.Committer.Email) {
-						canValidate = true
-						email = e.Email
-						break
-					}
-				}
-			}
-			if !canValidate {
-				continue // Skip this key
-			}
-
-			commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, c.Signature.Payload, k, committer, committer, email)
-			if commitVerification != nil {
-				return commitVerification
-			}
-		}
-	}
-
-	if setting.Repository.Signing.SigningKey != "" && setting.Repository.Signing.SigningKey != "default" && setting.Repository.Signing.SigningKey != "none" {
-		// OK we should try the default key
-		gpgSettings := git.GPGSettings{
-			Sign:  true,
-			KeyID: setting.Repository.Signing.SigningKey,
-			Name:  setting.Repository.Signing.SigningName,
-			Email: setting.Repository.Signing.SigningEmail,
-		}
-		if err := gpgSettings.LoadPublicKeyContent(); err != nil {
-			log.Error("Error getting default signing key: %s %v", gpgSettings.KeyID, err)
-		} else if commitVerification := verifyWithGPGSettings(ctx, &gpgSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
-			if commitVerification.Reason == BadSignature {
-				defaultReason = BadSignature
-			} else {
-				return commitVerification
-			}
-		}
-	}
-
-	defaultGPGSettings, err := c.GetRepositoryDefaultPublicGPGKey(false)
-	if err != nil {
-		log.Error("Error getting default public gpg key: %v", err)
-	} else if defaultGPGSettings == nil {
-		log.Warn("Unable to get defaultGPGSettings for unattached commit: %s", c.ID.String())
-	} else if defaultGPGSettings.Sign {
-		if commitVerification := verifyWithGPGSettings(ctx, defaultGPGSettings, sig, c.Signature.Payload, committer, keyID); commitVerification != nil {
-			if commitVerification.Reason == BadSignature {
-				defaultReason = BadSignature
-			} else {
-				return commitVerification
-			}
-		}
-	}
-
-	return &CommitVerification{ // Default at this stage
-		CommittingUser: committer,
-		Verified:       false,
-		Warning:        defaultReason != NoKeyFound,
-		Reason:         defaultReason,
-		SigningKey: &GPGKey{
-			KeyID: keyID,
-		},
-	}
-}
-
-func verifyWithGPGSettings(ctx context.Context, gpgSettings *git.GPGSettings, sig *packet.Signature, payload string, committer *user_model.User, keyID string) *CommitVerification {
-	// First try to find the key in the db
-	if commitVerification := hashAndVerifyForKeyID(ctx, sig, payload, committer, gpgSettings.KeyID, gpgSettings.Name, gpgSettings.Email); commitVerification != nil {
-		return commitVerification
-	}
-
-	// Otherwise we have to parse the key
-	ekeys, err := checkArmoredGPGKeyString(gpgSettings.PublicKeyContent)
-	if err != nil {
-		log.Error("Unable to get default signing key: %v", err)
-		return &CommitVerification{
-			CommittingUser: committer,
-			Verified:       false,
-			Reason:         "gpg.error.generate_hash",
-		}
-	}
-	for _, ekey := range ekeys {
-		pubkey := ekey.PrimaryKey
-		content, err := base64EncPubKey(pubkey)
-		if err != nil {
-			return &CommitVerification{
-				CommittingUser: committer,
-				Verified:       false,
-				Reason:         "gpg.error.generate_hash",
-			}
-		}
-		k := &GPGKey{
-			Content: content,
-			CanSign: pubkey.CanSign(),
-			KeyID:   pubkey.KeyIdString(),
-		}
-		for _, subKey := range ekey.Subkeys {
-			content, err := base64EncPubKey(subKey.PublicKey)
-			if err != nil {
-				return &CommitVerification{
-					CommittingUser: committer,
-					Verified:       false,
-					Reason:         "gpg.error.generate_hash",
-				}
-			}
-			k.SubsKey = append(k.SubsKey, &GPGKey{
-				Content: content,
-				CanSign: subKey.PublicKey.CanSign(),
-				KeyID:   subKey.PublicKey.KeyIdString(),
-			})
-		}
-		if commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, k, committer, &user_model.User{
-			Name:  gpgSettings.Name,
-			Email: gpgSettings.Email,
-		}, gpgSettings.Email); commitVerification != nil {
-			return commitVerification
-		}
-		if keyID == k.KeyID {
-			// This is a bad situation ... We have a key id that matches our default key but the signature doesn't match.
-			return &CommitVerification{
-				CommittingUser: committer,
-				Verified:       false,
-				Warning:        true,
-				Reason:         BadSignature,
-			}
-		}
-	}
-	return nil
-}
-
 func verifySign(s *packet.Signature, h hash.Hash, k *GPGKey) error {
 	// Check if key can sign
 	if !k.CanSign {
@@ -369,7 +107,7 @@ func hashAndVerifyWithSubKeys(sig *packet.Signature, payload string, k *GPGKey)
 	return nil, nil
 }
 
-func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification {
+func HashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload string, k *GPGKey, committer, signer *user_model.User, email string) *CommitVerification {
 	key, err := hashAndVerifyWithSubKeys(sig, payload, k)
 	if err != nil { // Skipping failed to generate hash
 		return &CommitVerification{
@@ -392,78 +130,6 @@ func hashAndVerifyWithSubKeysCommitVerification(sig *packet.Signature, payload s
 	return nil
 }
 
-func hashAndVerifyForKeyID(ctx context.Context, sig *packet.Signature, payload string, committer *user_model.User, keyID, name, email string) *CommitVerification {
-	if keyID == "" {
-		return nil
-	}
-	keys, err := db.Find[GPGKey](ctx, FindGPGKeyOptions{
-		KeyID:          keyID,
-		IncludeSubKeys: true,
-	})
-	if err != nil {
-		log.Error("GetGPGKeysByKeyID: %v", err)
-		return &CommitVerification{
-			CommittingUser: committer,
-			Verified:       false,
-			Reason:         "gpg.error.failed_retrieval_gpg_keys",
-		}
-	}
-	if len(keys) == 0 {
-		return nil
-	}
-	for _, key := range keys {
-		var primaryKeys []*GPGKey
-		if key.PrimaryKeyID != "" {
-			primaryKeys, err = db.Find[GPGKey](ctx, FindGPGKeyOptions{
-				KeyID:          key.PrimaryKeyID,
-				IncludeSubKeys: true,
-			})
-			if err != nil {
-				log.Error("GetGPGKeysByKeyID: %v", err)
-				return &CommitVerification{
-					CommittingUser: committer,
-					Verified:       false,
-					Reason:         "gpg.error.failed_retrieval_gpg_keys",
-				}
-			}
-		}
-
-		activated, email := checkKeyEmails(ctx, email, append([]*GPGKey{key}, primaryKeys...)...)
-		if !activated {
-			continue
-		}
-
-		signer := &user_model.User{
-			Name:  name,
-			Email: email,
-		}
-		if key.OwnerID != 0 {
-			owner, err := user_model.GetUserByID(ctx, key.OwnerID)
-			if err == nil {
-				signer = owner
-			} else if !user_model.IsErrUserNotExist(err) {
-				log.Error("Failed to user_model.GetUserByID: %d for key ID: %d (%s) %v", key.OwnerID, key.ID, key.KeyID, err)
-				return &CommitVerification{
-					CommittingUser: committer,
-					Verified:       false,
-					Reason:         "gpg.error.no_committer_account",
-				}
-			}
-		}
-		commitVerification := hashAndVerifyWithSubKeysCommitVerification(sig, payload, key, committer, signer, email)
-		if commitVerification != nil {
-			return commitVerification
-		}
-	}
-	// This is a bad situation ... We have a key id that is in our database but the signature doesn't match.
-	return &CommitVerification{
-		CommittingUser: committer,
-		Verified:       false,
-		Warning:        true,
-		Reason:         BadSignature,
-	}
-}
-
 // CalculateTrustStatus will calculate the TrustStatus for a commit verification within a repository
 // There are several trust models in Gitea
 func CalculateTrustStatus(verification *CommitVerification, repoTrustModel repo_model.TrustModelType, isOwnerMemberCollaborator func(*user_model.User) (bool, error), keyMap *map[string]bool) error {
@@ -13,9 +13,9 @@ import (
 	"strings"
 	"time"
 
-	"github.com/keybase/go-crypto/openpgp"
-	"github.com/keybase/go-crypto/openpgp/armor"
-	"github.com/keybase/go-crypto/openpgp/packet"
+	"github.com/ProtonMail/go-crypto/openpgp"
+	"github.com/ProtonMail/go-crypto/openpgp/armor"
+	"github.com/ProtonMail/go-crypto/openpgp/packet"
 )
 
 // __________________ ________ ____ __.
@@ -33,9 +33,9 @@ import (
 
 // This file provides common functions relating to GPG Keys
 
-// checkArmoredGPGKeyString checks if the given key string is a valid GPG armored key.
+// CheckArmoredGPGKeyString checks if the given key string is a valid GPG armored key.
 // The function returns the actual public key on success
-func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
+func CheckArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
 	list, err := openpgp.ReadArmoredKeyRing(strings.NewReader(content))
 	if err != nil {
 		return nil, ErrGPGKeyParsing{err}
@@ -43,8 +43,8 @@ func checkArmoredGPGKeyString(content string) (openpgp.EntityList, error) {
 	return list, nil
 }
 
-// base64EncPubKey encode public key content to base 64
-func base64EncPubKey(pubkey *packet.PublicKey) (string, error) {
+// Base64EncPubKey encode public key content to base 64
+func Base64EncPubKey(pubkey *packet.PublicKey) (string, error) {
 	var w bytes.Buffer
 	err := pubkey.Serialize(&w)
 	if err != nil {
@@ -80,7 +80,7 @@ func base64DecPubKey(content string) (*packet.PublicKey, error) {
 	return pkey, nil
 }
 
-// getExpiryTime extract the expire time of primary key based on sig
+// getExpiryTime extract the expiry time of primary key based on sig
 func getExpiryTime(e *openpgp.Entity) time.Time {
 	expiry := time.Time{}
 	// Extract self-sign for expire date based on : https://github.com/golang/crypto/blob/master/openpgp/keys.go#L165
@@ -88,12 +88,12 @@ func getExpiryTime(e *openpgp.Entity) time.Time {
 	for _, ident := range e.Identities {
 		if selfSig == nil {
 			selfSig = ident.SelfSignature
-		} else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
+		} else if ident.SelfSignature != nil && ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
 			selfSig = ident.SelfSignature
 			break
 		}
 	}
-	if selfSig.KeyLifetimeSecs != nil {
+	if selfSig != nil && selfSig.KeyLifetimeSecs != nil {
 		expiry = e.PrimaryKey.CreationTime.Add(time.Duration(*selfSig.KeyLifetimeSecs) * time.Second)
 	}
 	return expiry
@@ -119,7 +119,7 @@ func readArmoredSign(r io.Reader) (body io.Reader, err error) {
 	return block.Body, nil
 }
 
-func extractSignature(s string) (*packet.Signature, error) {
+func ExtractSignature(s string) (*packet.Signature, error) {
 	r, err := readArmoredSign(strings.NewReader(s))
 	if err != nil {
 		return nil, fmt.Errorf("Failed to read signature armor")
@@ -135,7 +135,7 @@ func extractSignature(s string) (*packet.Signature, error) {
 	return sig, nil
 }
 
-func tryGetKeyIDFromSignature(sig *packet.Signature) string {
+func TryGetKeyIDFromSignature(sig *packet.Signature) string {
 	if sig.IssuerKeyId != nil && (*sig.IssuerKeyId) != 0 {
 		return fmt.Sprintf("%016X", *sig.IssuerKeyId)
 	}
@@ -13,7 +13,8 @@ import (
 	"code.gitea.io/gitea/modules/timeutil"
 	"code.gitea.io/gitea/modules/util"
 
-	"github.com/keybase/go-crypto/openpgp/packet"
+	"github.com/ProtonMail/go-crypto/openpgp"
+	"github.com/ProtonMail/go-crypto/openpgp/packet"
 	"github.com/stretchr/testify/assert"
 	"github.com/stretchr/testify/require"
 )
@@ -50,7 +51,7 @@ MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg==
 =i9b7
 -----END PGP PUBLIC KEY BLOCK-----`
 
-	key, err := checkArmoredGPGKeyString(testGPGArmor)
+	key, err := CheckArmoredGPGKeyString(testGPGArmor)
 	assert.NoError(t, err, "Could not parse a valid GPG public armored rsa key", key)
 	// TODO verify value of key
 }
@@ -71,7 +72,7 @@ OyjLLnFQiVmq7kEA/0z0CQe3ZQiQIq5zrs7Nh1XRkFAo8GlU/SGC9XFFi722
 =ZiSe
 -----END PGP PUBLIC KEY BLOCK-----`
 
-	key, err := checkArmoredGPGKeyString(testGPGArmor)
+	key, err := CheckArmoredGPGKeyString(testGPGArmor)
 	assert.NoError(t, err, "Could not parse a valid GPG public armored brainpoolP256r1 key", key)
 	// TODO verify value of key
 }
@@ -107,14 +108,14 @@ Av844q/BfRuVsJsK1NDNG09LC30B0l3LKBqlrRmRTUMHtgchdX2dY+p7GPOoSzlR
 MkM/fdpyc2hY7Dl/+qFmN5MG5yGmMpQcX+RNNR222ibNC1D3wg==
 =i9b7
 -----END PGP PUBLIC KEY BLOCK-----`
-	keys, err := checkArmoredGPGKeyString(testGPGArmor)
+	keys, err := CheckArmoredGPGKeyString(testGPGArmor)
 	require.NotEmpty(t, keys)
 
 	ekey := keys[0]
 	assert.NoError(t, err, "Could not parse a valid GPG armored key", ekey)
 
 	pubkey := ekey.PrimaryKey
-	content, err := base64EncPubKey(pubkey)
+	content, err := Base64EncPubKey(pubkey)
 	assert.NoError(t, err, "Could not base64 encode a valid PublicKey content", ekey)
 
 	key := &GPGKey{
@@ -175,9 +176,9 @@ committer Antoine GIRARD <sapk@sapk.fr> 1489013107 +0100
 Unknown GPG key with good email
 `
 	// Reading Sign
-	goodSig, err := extractSignature(testGoodSigArmor)
+	goodSig, err := ExtractSignature(testGoodSigArmor)
 	assert.NoError(t, err, "Could not parse a valid GPG armored signature", testGoodSigArmor)
-	badSig, err := extractSignature(testBadSigArmor)
+	badSig, err := ExtractSignature(testBadSigArmor)
 	assert.NoError(t, err, "Could not parse a valid GPG armored signature", testBadSigArmor)
 
 	// Generating hash of commit
@@ -385,7 +386,7 @@ epiDVQ==
 =VSKJ
 -----END PGP PUBLIC KEY BLOCK-----
 `
-	keys, err := checkArmoredGPGKeyString(testIssue6599)
+	keys, err := CheckArmoredGPGKeyString(testIssue6599)
 	assert.NoError(t, err)
 	if assert.NotEmpty(t, keys) {
 		ekey := keys[0]
@@ -395,11 +396,33 @@ epiDVQ==
 }
 
 func TestTryGetKeyIDFromSignature(t *testing.T) {
-	assert.Empty(t, tryGetKeyIDFromSignature(&packet.Signature{}))
-	assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
+	assert.Empty(t, TryGetKeyIDFromSignature(&packet.Signature{}))
+	assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
 		IssuerKeyId: util.ToPointer(uint64(0x38D1A3EADDBEA9C)),
 	}))
-	assert.Equal(t, "038D1A3EADDBEA9C", tryGetKeyIDFromSignature(&packet.Signature{
+	assert.Equal(t, "038D1A3EADDBEA9C", TryGetKeyIDFromSignature(&packet.Signature{
 		IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
 	}))
 }
+
+func TestParseGPGKey(t *testing.T) {
+	assert.NoError(t, unittest.PrepareTestDatabase())
+	assert.NoError(t, db.Insert(db.DefaultContext, &user_model.EmailAddress{UID: 1, Email: "email1@example.com", IsActivated: true}))
+
+	// create a key for test email
+	e, err := openpgp.NewEntity("name", "comment", "email1@example.com", nil)
+	require.NoError(t, err)
+	k, err := parseGPGKey(db.DefaultContext, 1, e, true)
+	require.NoError(t, err)
+	assert.NotEmpty(t, k.KeyID)
+	assert.NotEmpty(t, k.Emails) // the key is valid, matches the email
+
+	// then revoke the key
+	for _, id := range e.Identities {
+		id.Revocations = append(id.Revocations, &packet.Signature{RevocationReason: util.ToPointer(packet.KeyCompromised)})
+	}
+	k, err = parseGPGKey(db.DefaultContext, 1, e, true)
+	require.NoError(t, err)
+	assert.NotEmpty(t, k.KeyID)
+	assert.Empty(t, k.Emails) // the key is revoked, matches no email
+}
@@ -50,7 +50,7 @@ func VerifyGPGKey(ctx context.Context, ownerID int64, keyID, token, signature st
 		return "", err
 	}
 
-	sig, err := extractSignature(signature)
+	sig, err := ExtractSignature(signature)
 	if err != nil {
 		return "", ErrGPGInvalidTokenSignature{
 			ID: key.KeyID,
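The hunks above export the former private helpers (CheckArmoredGPGKeyString, Base64EncPubKey, ExtractSignature, TryGetKeyIDFromSignature) so other packages can reuse them. A minimal sketch of how a caller might combine two of them, assuming the helpers stay in models/asymkey (imported here as asymkey_model); the function and variable names outside the diff are illustrative only:

    import (
    	asymkey_model "code.gitea.io/gitea/models/asymkey"
    )

    // keyIDFromArmoredSig parses an armored signature and reports its issuer key ID, if any.
    func keyIDFromArmoredSig(armored string) (string, error) {
    	sig, err := asymkey_model.ExtractSignature(armored)
    	if err != nil {
    		return "", err // not a valid armored signature
    	}
    	// Returns "" when the signature carries neither an issuer key ID nor a fingerprint.
    	return asymkey_model.TryGetKeyIDFromSignature(sig), nil
    }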
@@ -7,23 +7,36 @@ import (
 	"context"
 	"time"
 
+	"code.gitea.io/gitea/modules/gtprof"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/setting"
+
 	"xorm.io/xorm/contexts"
 )
 
-type SlowQueryHook struct {
+type EngineHook struct {
 	Threshold time.Duration
 	Logger    log.Logger
 }
 
-var _ contexts.Hook = (*SlowQueryHook)(nil)
+var _ contexts.Hook = (*EngineHook)(nil)
 
-func (*SlowQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
-	return c.Ctx, nil
+func (*EngineHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
+	ctx, _ := gtprof.GetTracer().Start(c.Ctx, gtprof.TraceSpanDatabase)
+	return ctx, nil
 }
 
-func (h *SlowQueryHook) AfterProcess(c *contexts.ContextHook) error {
+func (h *EngineHook) AfterProcess(c *contexts.ContextHook) error {
+	span := gtprof.GetContextSpan(c.Ctx)
+	if span != nil {
+		// Do not record SQL parameters here:
+		// * It shouldn't expose the parameters because they contain sensitive information, end users need to report the trace details safely.
+		// * Some parameters contain quite long texts, waste memory and are difficult to display.
+		span.SetAttributeString(gtprof.TraceAttrDbSQL, c.SQL)
+		span.End()
+	} else {
+		setting.PanicInDevOrTesting("span in database engine hook is nil")
+	}
 	if c.ExecuteTime >= h.Threshold {
 		// 8 is the amount of skips passed to runtime.Caller, so that in the log the correct function
 		// is being displayed (the function that ultimately wants to execute the query in the code)
@@ -72,7 +72,7 @@ func InitEngine(ctx context.Context) error {
 	xe.SetDefaultContext(ctx)
 
 	if setting.Database.SlowQueryThreshold > 0 {
-		xe.AddHook(&SlowQueryHook{
+		xe.AddHook(&EngineHook{
 			Threshold: setting.Database.SlowQueryThreshold,
 			Logger:    log.GetLogger("xorm"),
 		})
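The renamed EngineHook now starts a gtprof span in BeforeProcess and ends it in AfterProcess, on top of the existing slow-query logging. A stripped-down sketch of that same before/after pairing for a xorm hook, assuming (as the hunk above does) that xorm passes the context returned from BeforeProcess back into AfterProcess; the hook and key types here are illustrative, not Gitea code:

    import (
    	"context"
    	"time"

    	"xorm.io/xorm/contexts"
    )

    // timingHook stashes a start time in BeforeProcess and reads it back in AfterProcess.
    type timingHook struct{}

    type startKey struct{}

    var _ contexts.Hook = (*timingHook)(nil)

    func (*timingHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
    	return context.WithValue(c.Ctx, startKey{}, time.Now()), nil
    }

    func (*timingHook) AfterProcess(c *contexts.ContextHook) error {
    	if start, ok := c.Ctx.Value(startKey{}).(time.Time); ok {
    		_ = time.Since(start) // a real hook would record this, e.g. into a span or a log line
    	}
    	return nil
    }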
@@ -11,8 +11,6 @@ import (
 	"code.gitea.io/gitea/modules/util"
 )
 
-var ErrNameEmpty = util.SilentWrap{Message: "name is empty", Err: util.ErrInvalidArgument}
-
 // ErrNameReserved represents a "reserved name" error.
 type ErrNameReserved struct {
 	Name string
@@ -79,7 +77,7 @@ func (err ErrNameCharsNotAllowed) Unwrap() error {
 func IsUsableName(reservedNames, reservedPatterns []string, name string) error {
 	name = strings.TrimSpace(strings.ToLower(name))
 	if utf8.RuneCountInString(name) == 0 {
-		return ErrNameEmpty
+		return util.SilentWrap{Message: "name is empty", Err: util.ErrInvalidArgument}
 	}
 
 	for i := range reservedNames {
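With the ErrNameEmpty sentinel gone, IsUsableName returns a freshly built util.SilentWrap instead. Callers that compared against the sentinel would match on the wrapped error instead; a hedged sketch, assuming the function stays in models/db and that util.SilentWrap unwraps to its Err field so errors.Is can see util.ErrInvalidArgument:

    if err := db.IsUsableName(reservedNames, reservedPatterns, name); err != nil {
    	if errors.Is(err, util.ErrInvalidArgument) {
    		// empty or otherwise invalid name; the "name is empty" message is informational only
    	}
    	return err
    }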
@@ -171,3 +171,9 @@
   user_id: 40
   repo_id: 61
   mode: 4
+
+-
+  id: 30
+  user_id: 40
+  repo_id: 1
+  mode: 2
@@ -69,3 +69,21 @@
   created_unix: 1730330775
   updated_unix: 1730330775
   expired_unix: 1738106775
+
+-
+  id: 23
+  run_id: 793
+  runner_id: 1
+  repo_id: 2
+  owner_id: 2
+  commit_sha: c2d72f548424103f01ee1dc02889c1e2bff816b0
+  storage_path: "27/5/1730330775594233150.chunk"
+  file_size: 1024
+  file_compressed_size: 1024
+  content_encoding: "application/zip"
+  artifact_path: "artifact-v4-download.zip"
+  artifact_name: "artifact-v4-download"
+  status: 2
+  created_unix: 1730330775
+  updated_unix: 1730330775
+  expired_unix: 1738106775
@@ -1694,19 +1694,6 @@
   is_fsck_enabled: true
   close_issues_via_commit_in_any_branch: false
 
--
-  id: 59
-  owner_id: 2
-  owner_name: user2
-  lower_name: test_commit_revert
-  name: test_commit_revert
-  default_branch: main
-  is_empty: false
-  is_archived: false
-  is_private: true
-  status: 0
-  num_issues: 0
-
 -
   id: 60
   owner_id: 40
@@ -67,7 +67,7 @@
   num_followers: 2
   num_following: 1
   num_stars: 2
-  num_repos: 15
+  num_repos: 14
   num_teams: 0
   num_members: 0
   visibility: 0
@@ -22,6 +22,7 @@
   content_type: 1 # json
   events: '{"push_only":false,"send_everything":false,"choose_events":false,"events":{"create":false,"push":true,"pull_request":true}}'
   is_active: true
+
 -
   id: 4
   repo_id: 2
@@ -29,3 +30,23 @@
   content_type: 1 # json
   events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
   is_active: true
+
+-
+  id: 5
+  repo_id: 0
+  owner_id: 0
+  url: www.example.com/url5
+  content_type: 1 # json
+  events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
+  is_active: true
+  is_system_webhook: true
+
+-
+  id: 6
+  repo_id: 0
+  owner_id: 0
+  url: www.example.com/url6
+  content_type: 1 # json
+  events: '{"push_only":true,"branch_filter":"{master,feature*}"}'
+  is_active: true
+  is_system_webhook: false
@@ -167,6 +167,9 @@ func GetBranch(ctx context.Context, repoID int64, branchName string) (*Branch, e
 			BranchName: branchName,
 		}
 	}
+	// FIXME: this design is not right: it doesn't check `branch.IsDeleted`, it doesn't make sense to make callers to check IsDeleted again and again.
+	// It causes inconsistency with `GetBranches` and `git.GetBranch`, and will lead to strange bugs
+	// In the future, there should be 2 functions: `GetBranchExisting` and `GetBranchWithDeleted`
 	return &branch, nil
 }
 
@@ -440,6 +443,8 @@ type FindRecentlyPushedNewBranchesOptions struct {
 }
 
 type RecentlyPushedNewBranch struct {
+	BranchRepo        *repo_model.Repository
+	BranchName        string
 	BranchDisplayName string
 	BranchLink        string
 	BranchCompareURL  string
@@ -540,7 +545,9 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
 			branchDisplayName = fmt.Sprintf("%s:%s", branch.Repo.FullName(), branchDisplayName)
 		}
 		newBranches = append(newBranches, &RecentlyPushedNewBranch{
+			BranchRepo:        branch.Repo,
 			BranchDisplayName: branchDisplayName,
+			BranchName:        branch.Name,
 			BranchLink:        fmt.Sprintf("%s/src/branch/%s", branch.Repo.Link(), util.PathEscapeSegments(branch.Name)),
 			BranchCompareURL:  branch.Repo.ComposeBranchCompareURL(opts.BaseRepo, branch.Name),
 			CommitTime:        branch.CommitTime,
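The FIXME added to GetBranch spells out that the function still returns soft-deleted branches, so callers keep checking IsDeleted themselves. A sketch of that caller-side guard, assuming the Branch model keeps its IsDeleted flag; the error handling is illustrative:

    branch, err := git_model.GetBranch(ctx, repo.ID, branchName)
    if err != nil {
    	return err
    }
    if branch.IsDeleted {
    	// treat it as missing until GetBranchExisting/GetBranchWithDeleted exist
    	return fmt.Errorf("branch %q was deleted", branchName)
    }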
@@ -496,47 +496,11 @@ type SignCommitWithStatuses struct {
 	*asymkey_model.SignCommit
 }
 
-// ParseCommitsWithStatus checks commits latest statuses and calculates its worst status state
-func ParseCommitsWithStatus(ctx context.Context, oldCommits []*asymkey_model.SignCommit, repo *repo_model.Repository) []*SignCommitWithStatuses {
-	newCommits := make([]*SignCommitWithStatuses, 0, len(oldCommits))
-
-	for _, c := range oldCommits {
-		commit := &SignCommitWithStatuses{
-			SignCommit: c,
-		}
-		statuses, _, err := GetLatestCommitStatus(ctx, repo.ID, commit.ID.String(), db.ListOptions{})
-		if err != nil {
-			log.Error("GetLatestCommitStatus: %v", err)
-		} else {
-			commit.Statuses = statuses
-			commit.Status = CalcCommitStatus(statuses)
-		}
-
-		newCommits = append(newCommits, commit)
-	}
-	return newCommits
-}
-
 // hashCommitStatusContext hash context
 func hashCommitStatusContext(context string) string {
 	return fmt.Sprintf("%x", sha1.Sum([]byte(context)))
 }
 
-// ConvertFromGitCommit converts git commits into SignCommitWithStatuses
-func ConvertFromGitCommit(ctx context.Context, commits []*git.Commit, repo *repo_model.Repository) []*SignCommitWithStatuses {
-	return ParseCommitsWithStatus(ctx,
-		asymkey_model.ParseCommitsWithSignature(
-			ctx,
-			user_model.ValidateCommitsWithEmails(ctx, commits),
-			repo.GetTrustModel(),
-			func(user *user_model.User) (bool, error) {
-				return repo_model.IsOwnerMemberCollaborator(ctx, repo, user.ID)
-			},
-		),
-		repo,
-	)
-}
-
 // CommitStatusesHideActionsURL hide Gitea Actions urls
 func CommitStatusesHideActionsURL(ctx context.Context, statuses []*CommitStatus) {
 	idToRepos := make(map[int64]*repo_model.Repository)
@@ -19,8 +19,6 @@ import (
 	repo_model "code.gitea.io/gitea/models/repo"
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/container"
-	"code.gitea.io/gitea/modules/gitrepo"
-	"code.gitea.io/gitea/modules/json"
 	"code.gitea.io/gitea/modules/log"
 	"code.gitea.io/gitea/modules/optional"
 	"code.gitea.io/gitea/modules/references"
@@ -112,8 +110,8 @@ const (
 	CommentTypePRScheduledToAutoMerge   // 34 pr was scheduled to auto merge when checks succeed
 	CommentTypePRUnScheduledToAutoMerge // 35 pr was un scheduled to auto merge when checks succeed
 
-	CommentTypePin   // 36 pin Issue
-	CommentTypeUnpin // 37 unpin Issue
+	CommentTypePin   // 36 pin Issue/PullRequest
+	CommentTypeUnpin // 37 unpin Issue/PullRequest
 
 	CommentTypeChangeTimeEstimate // 38 Change time estimate
 )
@@ -774,41 +772,6 @@ func (c *Comment) CodeCommentLink(ctx context.Context) string {
 	return fmt.Sprintf("%s/files#%s", c.Issue.Link(), c.HashTag())
 }
 
-// LoadPushCommits Load push commits
-func (c *Comment) LoadPushCommits(ctx context.Context) (err error) {
-	if c.Content == "" || c.Commits != nil || c.Type != CommentTypePullRequestPush {
-		return nil
-	}
-
-	var data PushActionContent
-
-	err = json.Unmarshal([]byte(c.Content), &data)
-	if err != nil {
-		return err
-	}
-
-	c.IsForcePush = data.IsForcePush
-
-	if c.IsForcePush {
-		if len(data.CommitIDs) != 2 {
-			return nil
-		}
-		c.OldCommit = data.CommitIDs[0]
-		c.NewCommit = data.CommitIDs[1]
-	} else {
-		gitRepo, closer, err := gitrepo.RepositoryFromContextOrOpen(ctx, c.Issue.Repo)
-		if err != nil {
-			return err
-		}
-		defer closer.Close()
-
-		c.Commits = git_model.ConvertFromGitCommit(ctx, gitRepo.GetCommitsFromIDs(data.CommitIDs), c.Issue.Repo)
-		c.CommitsNum = int64(len(c.Commits))
-	}
-
-	return err
-}
-
 // CreateComment creates comment with context
 func CreateComment(ctx context.Context, opts *CreateCommentOptions) (_ *Comment, err error) {
 	ctx, committer, err := db.TxContext(ctx)
@@ -86,8 +86,10 @@ func findCodeComments(ctx context.Context, opts FindCommentsOptions, issue *Issu
 			ids = append(ids, comment.ReviewID)
 		}
 	}
-	if err := e.In("id", ids).Find(&reviews); err != nil {
-		return nil, err
+	if len(ids) > 0 {
+		if err := e.In("id", ids).Find(&reviews); err != nil {
+			return nil, err
+		}
 	}
 
 	n := 0
@@ -26,14 +26,14 @@ func (comments CommentList) LoadPosters(ctx context.Context) error {
 		return c.PosterID, c.Poster == nil && c.PosterID > 0
 	})
 
-	posterMaps, err := getPostersByIDs(ctx, posterIDs)
+	posterMaps, err := user_model.GetUsersMapByIDs(ctx, posterIDs)
 	if err != nil {
 		return err
 	}
 
 	for _, comment := range comments {
 		if comment.Poster == nil {
-			comment.Poster = getPoster(comment.PosterID, posterMaps)
+			comment.Poster = user_model.GetPossibleUserFromMap(comment.PosterID, posterMaps)
 		}
 	}
 	return nil
@@ -41,7 +41,7 @@ func (comments CommentList) LoadPosters(ctx context.Context) error {
 
 func (comments CommentList) getLabelIDs() []int64 {
 	return container.FilterSlice(comments, func(comment *Comment) (int64, bool) {
-		return comment.LabelID, comment.LabelID > 0
+		return comment.LabelID, comment.LabelID > 0 && comment.Label == nil
 	})
 }
 
@@ -51,6 +51,9 @@ func (comments CommentList) loadLabels(ctx context.Context) error {
 	}
 
 	labelIDs := comments.getLabelIDs()
+	if len(labelIDs) == 0 {
+		return nil
+	}
 	commentLabels := make(map[int64]*Label, len(labelIDs))
 	left := len(labelIDs)
 	for left > 0 {
@@ -118,8 +121,8 @@ func (comments CommentList) loadMilestones(ctx context.Context) error {
 		milestoneIDs = milestoneIDs[limit:]
 	}
 
-	for _, issue := range comments {
-		issue.Milestone = milestoneMaps[issue.MilestoneID]
+	for _, comment := range comments {
+		comment.Milestone = milestoneMaps[comment.MilestoneID]
 	}
 	return nil
 }
@@ -175,6 +178,9 @@ func (comments CommentList) loadAssignees(ctx context.Context) error {
 	}
 
 	assigneeIDs := comments.getAssigneeIDs()
+	if len(assigneeIDs) == 0 {
+		return nil
+	}
 	assignees := make(map[int64]*user_model.User, len(assigneeIDs))
 	left := len(assigneeIDs)
 	for left > 0 {
@@ -301,6 +307,9 @@ func (comments CommentList) loadDependentIssues(ctx context.Context) error {
 
 	e := db.GetEngine(ctx)
 	issueIDs := comments.getDependentIssueIDs()
+	if len(issueIDs) == 0 {
+		return nil
+	}
 	issues := make(map[int64]*Issue, len(issueIDs))
 	left := len(issueIDs)
 	for left > 0 {
@@ -427,6 +436,9 @@ func (comments CommentList) loadReviews(ctx context.Context) error {
 	}
 
 	reviewIDs := comments.getReviewIDs()
+	if len(reviewIDs) == 0 {
+		return nil
+	}
 	reviews := make(map[int64]*Review, len(reviewIDs))
 	if err := db.GetEngine(ctx).In("id", reviewIDs).Find(&reviews); err != nil {
 		return err
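The repeated `if len(...) == 0 { return nil }` guards above all protect the same thing: issuing an `IN` query with an empty ID list is wasted work at best and, depending on the SQL builder, can produce an invalid or over-broad condition. The pattern in isolation, as a hedged sketch where collectIDs stands in for whatever FilterSlice call produced the IDs:

    ids := collectIDs(comments) // hypothetical helper, e.g. the result of container.FilterSlice
    if len(ids) == 0 {
    	return nil // nothing to load, skip the query entirely
    }
    found := make(map[int64]*Review, len(ids))
    if err := db.GetEngine(ctx).In("id", ids).Find(&found); err != nil {
    	return err
    }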
@@ -17,6 +17,7 @@ import (
 	user_model "code.gitea.io/gitea/models/user"
 	"code.gitea.io/gitea/modules/container"
 	"code.gitea.io/gitea/modules/log"
+	"code.gitea.io/gitea/modules/optional"
 	"code.gitea.io/gitea/modules/setting"
 	api "code.gitea.io/gitea/modules/structs"
 	"code.gitea.io/gitea/modules/timeutil"
@@ -238,6 +239,9 @@ func (issue *Issue) loadCommentsByType(ctx context.Context, tp CommentType) (err
 		IssueID: issue.ID,
 		Type:    tp,
 	})
+	for _, comment := range issue.Comments {
+		comment.Issue = issue
+	}
 	return err
 }
 
@@ -498,6 +502,45 @@ func GetIssueByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
 	return issue, nil
 }
 
+func isPullToCond(isPull optional.Option[bool]) builder.Cond {
+	if isPull.Has() {
+		return builder.Eq{"is_pull": isPull.Value()}
+	}
+	return builder.NewCond()
+}
+
+func FindLatestUpdatedIssues(ctx context.Context, repoID int64, isPull optional.Option[bool], pageSize int) (IssueList, error) {
+	issues := make([]*Issue, 0, pageSize)
+	err := db.GetEngine(ctx).Where("repo_id = ?", repoID).
+		And(isPullToCond(isPull)).
+		OrderBy("updated_unix DESC").
+		Limit(pageSize).
+		Find(&issues)
+	return issues, err
+}
+
+func FindIssuesSuggestionByKeyword(ctx context.Context, repoID int64, keyword string, isPull optional.Option[bool], excludedID int64, pageSize int) (IssueList, error) {
+	cond := builder.NewCond()
+	if excludedID > 0 {
+		cond = cond.And(builder.Neq{"`id`": excludedID})
+	}
+
+	// It seems that GitHub searches both title and content (maybe sorting by the search engine's ranking system?)
+	// The first PR (https://github.com/go-gitea/gitea/pull/32327) uses "search indexer" to search "name(title) + content"
+	// But it seems that searching "content" (especially LIKE by DB engine) generates worse (unusable) results.
+	// So now (https://github.com/go-gitea/gitea/pull/33538) it only searches "name(title)", leave the improvements to the future.
+	cond = cond.And(db.BuildCaseInsensitiveLike("`name`", keyword))
+
+	issues := make([]*Issue, 0, pageSize)
+	err := db.GetEngine(ctx).Where("repo_id = ?", repoID).
+		And(isPullToCond(isPull)).
+		And(cond).
+		OrderBy("updated_unix DESC, `index` DESC").
+		Limit(pageSize).
+		Find(&issues)
+	return issues, err
+}
+
 // GetIssueWithAttrsByIndex returns issue by index in a repository.
 func GetIssueWithAttrsByIndex(ctx context.Context, repoID, index int64) (*Issue, error) {
 	issue, err := GetIssueByIndex(ctx, repoID, index)
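A sketch of how the new suggestion helper might be called, e.g. to suggest up to 5 issues (not pulls) matching a typed keyword while excluding the issue currently being edited. The call signature and optional.Some come from the hunk above; everything else is illustrative:

    issues, err := issues_model.FindIssuesSuggestionByKeyword(ctx, repo.ID, keyword, optional.Some(false), currentIssueID, 5)
    if err != nil {
    	return err
    }
    for _, issue := range issues {
    	// render issue.Index and issue.Title as a suggestion entry
    	_ = issue.Index
    }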
@@ -81,53 +81,19 @@ func (issues IssueList) LoadPosters(ctx context.Context) error {
 		return issue.PosterID, issue.Poster == nil && issue.PosterID > 0
 	})
 
-	posterMaps, err := getPostersByIDs(ctx, posterIDs)
+	posterMaps, err := user_model.GetUsersMapByIDs(ctx, posterIDs)
 	if err != nil {
 		return err
 	}
 
 	for _, issue := range issues {
 		if issue.Poster == nil {
-			issue.Poster = getPoster(issue.PosterID, posterMaps)
+			issue.Poster = user_model.GetPossibleUserFromMap(issue.PosterID, posterMaps)
 		}
 	}
 	return nil
 }
 
-func getPostersByIDs(ctx context.Context, posterIDs []int64) (map[int64]*user_model.User, error) {
-	posterMaps := make(map[int64]*user_model.User, len(posterIDs))
-	left := len(posterIDs)
-	for left > 0 {
-		limit := db.DefaultMaxInSize
-		if left < limit {
-			limit = left
-		}
-		err := db.GetEngine(ctx).
-			In("id", posterIDs[:limit]).
-			Find(&posterMaps)
-		if err != nil {
-			return nil, err
-		}
-		left -= limit
-		posterIDs = posterIDs[limit:]
-	}
-	return posterMaps, nil
-}
-
-func getPoster(posterID int64, posterMaps map[int64]*user_model.User) *user_model.User {
-	if posterID == user_model.ActionsUserID {
-		return user_model.NewActionsUser()
-	}
-	if posterID <= 0 {
-		return nil
-	}
-	poster, ok := posterMaps[posterID]
-	if !ok {
-		return user_model.NewGhostUser()
-	}
-	return poster
-}
-
 func (issues IssueList) getIssueIDs() []int64 {
 	ids := make([]int64, 0, len(issues))
 	for _, issue := range issues {
@@ -38,13 +38,30 @@ func (issue *Issue) projectID(ctx context.Context) int64 {
 }
 
 // ProjectColumnID return project column id if issue was assigned to one
-func (issue *Issue) ProjectColumnID(ctx context.Context) int64 {
+func (issue *Issue) ProjectColumnID(ctx context.Context) (int64, error) {
 	var ip project_model.ProjectIssue
 	has, err := db.GetEngine(ctx).Where("issue_id=?", issue.ID).Get(&ip)
-	if err != nil || !has {
-		return 0
+	if err != nil {
+		return 0, err
+	} else if !has {
+		return 0, nil
 	}
-	return ip.ProjectColumnID
+	return ip.ProjectColumnID, nil
+}
+
+func LoadProjectIssueColumnMap(ctx context.Context, projectID, defaultColumnID int64) (map[int64]int64, error) {
+	issues := make([]project_model.ProjectIssue, 0)
+	if err := db.GetEngine(ctx).Where("project_id=?", projectID).Find(&issues); err != nil {
+		return nil, err
+	}
+	result := make(map[int64]int64, len(issues))
+	for _, issue := range issues {
+		if issue.ProjectColumnID == 0 {
+			issue.ProjectColumnID = defaultColumnID
+		}
+		result[issue.IssueID] = issue.ProjectColumnID
+	}
+	return result, nil
 }
 
 // LoadIssuesFromColumn load issues assigned to this column
@@ -59,11 +76,11 @@ func LoadIssuesFromColumn(ctx context.Context, b *project_model.Column, opts *Is
 	}
 
 	if b.Default {
-		issues, err := Issues(ctx, &IssuesOptions{
-			ProjectColumnID: db.NoConditionID,
-			ProjectID:       b.ProjectID,
-			SortType:        "project-column-sorting",
-		})
+		issues, err := Issues(ctx, opts.Copy(func(o *IssuesOptions) {
+			o.ProjectColumnID = db.NoConditionID
+			o.ProjectID = b.ProjectID
+			o.SortType = "project-column-sorting"
+		}))
 		if err != nil {
 			return nil, err
 		}
@@ -77,19 +94,6 @@ func LoadIssuesFromColumn(ctx context.Context, b *project_model.Column, opts *Is
 	return issueList, nil
 }
 
-// LoadIssuesFromColumnList load issues assigned to the columns
-func LoadIssuesFromColumnList(ctx context.Context, bs project_model.ColumnList, opts *IssuesOptions) (map[int64]IssueList, error) {
-	issuesMap := make(map[int64]IssueList, len(bs))
-	for i := range bs {
-		il, err := LoadIssuesFromColumn(ctx, bs[i], opts)
-		if err != nil {
-			return nil, err
-		}
-		issuesMap[bs[i].ID] = il
-	}
-	return issuesMap, nil
-}
-
 // IssueAssignOrRemoveProject changes the project associated with an issue
 // If newProjectID is 0, the issue is removed from the project
 func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_model.User, newProjectID, newColumnID int64) error {
@@ -110,7 +114,7 @@ func IssueAssignOrRemoveProject(ctx context.Context, issue *Issue, doer *user_mo
 		return util.NewPermissionDeniedErrorf("issue %d can't be accessed by project %d", issue.ID, newProject.ID)
 	}
 	if newColumnID == 0 {
-		newDefaultColumn, err := newProject.GetDefaultColumn(ctx)
+		newDefaultColumn, err := newProject.MustDefaultColumn(ctx)
 		if err != nil {
 			return err
 		}
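ProjectColumnID now surfaces database errors instead of silently returning 0, and the new LoadProjectIssueColumnMap batches the issue-to-column lookup for a whole project, substituting the default column where none is recorded. A sketch of the batched form; names outside the hunk above are assumptions:

    columnMap, err := issues_model.LoadProjectIssueColumnMap(ctx, project.ID, defaultColumn.ID)
    if err != nil {
    	return err
    }
    for _, issue := range issueList {
    	columnID := columnMap[issue.ID] // issues missing from the map were never added to the project
    	_ = columnID
    }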
@@ -49,9 +49,9 @@ type IssuesOptions struct { //nolint
 	// prioritize issues from this repo
 	PriorityRepoID int64
 	IsArchived     optional.Option[bool]
-	Org            *organization.Organization // issues permission scope
+	Owner          *user_model.User   // issues permission scope, it could be an organization or a user
 	Team           *organization.Team // issues permission scope
-	User           *user_model.User   // issues permission scope
+	Doer           *user_model.User   // issues permission scope
 }
 
 // Copy returns a copy of the options.
@@ -273,8 +273,12 @@ func applyConditions(sess *xorm.Session, opts *IssuesOptions) {
 
 	applyLabelsCondition(sess, opts)
 
-	if opts.User != nil {
-		sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.User.ID, opts.Org, opts.Team, opts.IsPull.Value()))
+	if opts.Owner != nil {
+		sess.And(repo_model.UserOwnedRepoCond(opts.Owner.ID))
+	}
+
+	if opts.Doer != nil && !opts.Doer.IsAdmin {
+		sess.And(issuePullAccessibleRepoCond("issue.repo_id", opts.Doer.ID, opts.Owner, opts.Team, opts.IsPull.Value()))
 	}
 }
 
@@ -321,20 +325,20 @@ func teamUnitsRepoCond(id string, userID, orgID, teamID int64, units ...unit.Typ
 }
 
 // issuePullAccessibleRepoCond userID must not be zero, this condition require join repository table
-func issuePullAccessibleRepoCond(repoIDstr string, userID int64, org *organization.Organization, team *organization.Team, isPull bool) builder.Cond {
+func issuePullAccessibleRepoCond(repoIDstr string, userID int64, owner *user_model.User, team *organization.Team, isPull bool) builder.Cond {
 	cond := builder.NewCond()
 	unitType := unit.TypeIssues
 	if isPull {
 		unitType = unit.TypePullRequests
 	}
-	if org != nil {
+	if owner != nil && owner.IsOrganization() {
 		if team != nil {
-			cond = cond.And(teamUnitsRepoCond(repoIDstr, userID, org.ID, team.ID, unitType)) // special team member repos
+			cond = cond.And(teamUnitsRepoCond(repoIDstr, userID, owner.ID, team.ID, unitType)) // special team member repos
 		} else {
 			cond = cond.And(
 				builder.Or(
-					repo_model.UserOrgUnitRepoCond(repoIDstr, userID, org.ID, unitType), // team member repos
-					repo_model.UserOrgPublicUnitRepoCond(userID, org.ID),                // user org public non-member repos, TODO: check repo has issues
+					repo_model.UserOrgUnitRepoCond(repoIDstr, userID, owner.ID, unitType), // team member repos
+					repo_model.UserOrgPublicUnitRepoCond(userID, owner.ID),                // user org public non-member repos, TODO: check repo has issues
 				),
 			)
 		}
@@ -107,7 +107,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error
 		accum.YourRepositoriesCount += stats.YourRepositoriesCount
 		accum.AssignCount += stats.AssignCount
 		accum.CreateCount += stats.CreateCount
-		accum.OpenCount += stats.MentionCount
+		accum.MentionCount += stats.MentionCount
 		accum.ReviewRequestedCount += stats.ReviewRequestedCount
 		accum.ReviewedCount += stats.ReviewedCount
 		i = chunk
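The renamed fields split two previously conflated roles: Owner scopes the search to repositories owned by a user or organization, while Doer is the viewer whose permissions restrict what is visible (admins skip the permission condition entirely). A sketch of filling the options under that split; the field names come from the hunk above, the surrounding variables are illustrative:

    opts := &issues_model.IssuesOptions{
    	Owner:  ownerUser, // *user_model.User: the user or organization whose repositories are listed
    	Doer:   ctxUser,   // who is looking; drives the accessible-repo condition
    	Team:   team,      // optional, further narrows an organization scope
    	IsPull: optional.Some(false),
    }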
@@ -7,6 +7,7 @@ import (
 	"bytes"
 	"context"
 	"fmt"
+	"strings"
 
 	"code.gitea.io/gitea/models/db"
 	repo_model "code.gitea.io/gitea/models/repo"
@@ -321,6 +322,11 @@ func valuesUser(m map[int64]*user_model.User) []*user_model.User {
 	return values
 }
 
+// newMigrationOriginalUser creates and returns a fake user for external user
+func newMigrationOriginalUser(name string) *user_model.User {
+	return &user_model.User{ID: 0, Name: name, LowerName: strings.ToLower(name)}
+}
+
 // LoadUsers loads reactions' all users
 func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Repository) ([]*user_model.User, error) {
 	if len(list) == 0 {
@@ -338,7 +344,7 @@ func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Reposit
 
 	for _, reaction := range list {
 		if reaction.OriginalAuthor != "" {
-			reaction.User = user_model.NewReplaceUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name()))
+			reaction.User = newMigrationOriginalUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name()))
 		} else if user, ok := userMaps[reaction.UserID]; ok {
 			reaction.User = user
 		} else {
@@ -930,17 +930,19 @@ func MarkConversation(ctx context.Context, comment *Comment, doer *user_model.Us
 }
 
 // CanMarkConversation Add or remove Conversation mark for a code comment permission check
-// the PR writer , offfcial reviewer and poster can do it
+// the PR writer , official reviewer and poster can do it
 func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.User) (permResult bool, err error) {
 	if doer == nil || issue == nil {
 		return false, fmt.Errorf("issue or doer is nil")
 	}
 
+	if err = issue.LoadRepo(ctx); err != nil {
+		return false, err
+	}
+	if issue.Repo.IsArchived {
+		return false, nil
+	}
 	if doer.ID != issue.PosterID {
-		if err = issue.LoadRepo(ctx); err != nil {
-			return false, err
-		}
-
 		p, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
 		if err != nil {
 			return false, err
@@ -46,11 +46,6 @@ func (s Stopwatch) Seconds() int64 {
 	return int64(timeutil.TimeStampNow() - s.CreatedUnix)
 }
 
-// Duration returns a human-readable duration string based on local server time
-func (s Stopwatch) Duration() string {
-	return util.SecToTime(s.Seconds())
-}
-
 func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, exists bool, err error) {
 	sw = new(Stopwatch)
 	exists, err = db.GetEngine(ctx).
@@ -201,7 +196,7 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss
 		Doer:    user,
 		Issue:   issue,
 		Repo:    issue.Repo,
-		Content: util.SecToTime(timediff),
+		Content: util.SecToHours(timediff),
 		Type:    CommentTypeStopTracking,
 		TimeID:  tt.ID,
 	}); err != nil {
103 models/organization/org_worktime.go (new file)
@@ -0,0 +1,103 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package organization
+
+import (
+	"sort"
+
+	"code.gitea.io/gitea/models/db"
+
+	"xorm.io/builder"
+)
+
+type WorktimeSumByRepos struct {
+	RepoName string
+	SumTime  int64
+}
+
+func GetWorktimeByRepos(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByRepos, err error) {
+	err = db.GetEngine(db.DefaultContext).
+		Select("repository.name AS repo_name, SUM(tracked_time.time) AS sum_time").
+		Table("tracked_time").
+		Join("INNER", "issue", "tracked_time.issue_id = issue.id").
+		Join("INNER", "repository", "issue.repo_id = repository.id").
+		Where(builder.Eq{"repository.owner_id": org.ID}).
+		And(builder.Eq{"tracked_time.deleted": false}).
+		And(builder.Gte{"tracked_time.created_unix": unitFrom}).
+		And(builder.Lte{"tracked_time.created_unix": unixTo}).
+		GroupBy("repository.name").
+		OrderBy("repository.name").
+		Find(&results)
+	return results, err
+}
+
+type WorktimeSumByMilestones struct {
+	RepoName          string
+	MilestoneName     string
+	MilestoneID       int64
+	MilestoneDeadline int64
+	SumTime           int64
+	HideRepoName      bool
+}
+
+func GetWorktimeByMilestones(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMilestones, err error) {
+	err = db.GetEngine(db.DefaultContext).
+		Select("repository.name AS repo_name, milestone.name AS milestone_name, milestone.id AS milestone_id, milestone.deadline_unix as milestone_deadline, SUM(tracked_time.time) AS sum_time").
+		Table("tracked_time").
+		Join("INNER", "issue", "tracked_time.issue_id = issue.id").
+		Join("INNER", "repository", "issue.repo_id = repository.id").
+		Join("LEFT", "milestone", "issue.milestone_id = milestone.id").
+		Where(builder.Eq{"repository.owner_id": org.ID}).
+		And(builder.Eq{"tracked_time.deleted": false}).
+		And(builder.Gte{"tracked_time.created_unix": unitFrom}).
+		And(builder.Lte{"tracked_time.created_unix": unixTo}).
+		GroupBy("repository.name, milestone.name, milestone.deadline_unix, milestone.id").
+		OrderBy("repository.name, milestone.deadline_unix, milestone.id").
+		Find(&results)
+
+	// TODO: pgsql: NULL values are sorted last in default ascending order, so we need to sort them manually again.
+	sort.Slice(results, func(i, j int) bool {
+		if results[i].RepoName != results[j].RepoName {
+			return results[i].RepoName < results[j].RepoName
+		}
+		if results[i].MilestoneDeadline != results[j].MilestoneDeadline {
+			return results[i].MilestoneDeadline < results[j].MilestoneDeadline
+		}
+		return results[i].MilestoneID < results[j].MilestoneID
+	})
+
+	// Show only the first RepoName, for nicer output.
+	prevRepoName := ""
+	for i := 0; i < len(results); i++ {
+		res := &results[i]
+		res.MilestoneDeadline = 0 // clear the deadline because we do not really need it
+		if prevRepoName == res.RepoName {
+			res.HideRepoName = true
+		}
+		prevRepoName = res.RepoName
+	}
+	return results, err
+}
+
+type WorktimeSumByMembers struct {
+	UserName string
+	SumTime  int64
+}
+
+func GetWorktimeByMembers(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMembers, err error) {
+	err = db.GetEngine(db.DefaultContext).
+		Select("`user`.name AS user_name, SUM(tracked_time.time) AS sum_time").
+		Table("tracked_time").
+		Join("INNER", "issue", "tracked_time.issue_id = issue.id").
+		Join("INNER", "repository", "issue.repo_id = repository.id").
+		Join("INNER", "`user`", "tracked_time.user_id = `user`.id").
+		Where(builder.Eq{"repository.owner_id": org.ID}).
+		And(builder.Eq{"tracked_time.deleted": false}).
+		And(builder.Gte{"tracked_time.created_unix": unitFrom}).
+		And(builder.Lte{"tracked_time.created_unix": unixTo}).
+		GroupBy("`user`.name").
+		OrderBy("sum_time DESC").
+		Find(&results)
+	return results, err
+}
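A sketch of querying the new worktime aggregations for the last 30 days. The function names and the (unitFrom, unixTo) unix-second parameters come from the new file above; the surrounding code is illustrative:

    now := time.Now().Unix()
    from := now - 30*24*60*60

    byRepo, err := organization.GetWorktimeByRepos(org, from, now)
    if err != nil {
    	return err
    }
    for _, row := range byRepo {
    	// row.SumTime is the number of tracked seconds for row.RepoName within the range
    	_ = row
    }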
@@ -228,6 +228,11 @@ func SetRepositoryLink(ctx context.Context, packageID, repoID int64) error {
 	return err
 }
 
+func UnlinkRepository(ctx context.Context, packageID int64) error {
+	_, err := db.GetEngine(ctx).ID(packageID).Cols("repo_id").Update(&Package{RepoID: 0})
+	return err
+}
+
 // UnlinkRepositoryFromAllPackages unlinks every package from the repository
 func UnlinkRepositoryFromAllPackages(ctx context.Context, repoID int64) error {
 	_, err := db.GetEngine(ctx).Where("repo_id = ?", repoID).Cols("repo_id").Update(&Package{})
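UnlinkRepository is the single-package counterpart of SetRepositoryLink, whereas the existing UnlinkRepositoryFromAllPackages detaches every package of a repository. A small hedged sketch of the single-package path; pkg and the import alias are illustrative:

    // detach one package from whatever repository it is currently linked to
    if err := packages_model.UnlinkRepository(ctx, pkg.ID); err != nil {
    	return err
    }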
@@ -175,10 +175,14 @@ func (p *Permission) LogString() string {
 	return fmt.Sprintf(format, args...)
 }
 
-func applyEveryoneRepoPermission(user *user_model.User, perm *Permission) {
+func finalProcessRepoUnitPermission(user *user_model.User, perm *Permission) {
 	if user == nil || user.ID <= 0 {
+		// for anonymous access, it could be:
+		// AccessMode is None or Read, units has repo units, unitModes is nil
 		return
 	}
 
+	// apply everyone access permissions
 	for _, u := range perm.units {
 		if u.EveryoneAccessMode >= perm_model.AccessModeRead && u.EveryoneAccessMode > perm.everyoneAccessMode[u.Type] {
 			if perm.everyoneAccessMode == nil {
@@ -187,17 +191,40 @@ func applyEveryoneRepoPermission(user *user_model.User, perm *Permission) {
 			perm.everyoneAccessMode[u.Type] = u.EveryoneAccessMode
 		}
 	}
+
+	if perm.unitsMode == nil {
+		// if unitsMode is not set, then it means that the default p.AccessMode applies to all units
+		return
+	}
+
+	// remove no permission units
+	origPermUnits := perm.units
+	perm.units = make([]*repo_model.RepoUnit, 0, len(perm.units))
+	for _, u := range origPermUnits {
+		shouldKeep := false
+		for t := range perm.unitsMode {
+			if shouldKeep = u.Type == t; shouldKeep {
+				break
+			}
+		}
+		for t := range perm.everyoneAccessMode {
+			if shouldKeep = shouldKeep || u.Type == t; shouldKeep {
+				break
+			}
+		}
+		if shouldKeep {
+			perm.units = append(perm.units, u)
+		}
+	}
 }
 
 // GetUserRepoPermission returns the user permissions to the repository
 func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, user *user_model.User) (perm Permission, err error) {
 	defer func() {
 		if err == nil {
-			applyEveryoneRepoPermission(user, &perm)
-		}
-		if log.IsTrace() {
-			log.Trace("Permission Loaded for user %-v in repo %-v, permissions: %-+v", user, repo, perm)
+			finalProcessRepoUnitPermission(user, &perm)
 		}
+		log.Trace("Permission Loaded for user %-v in repo %-v, permissions: %-+v", user, repo, perm)
 	}()
 
 	if err = repo.LoadUnits(ctx); err != nil {
@@ -294,16 +321,6 @@ func GetUserRepoPermission(ctx context.Context, repo *repo_model.Repository, use
 		}
 	}
 
-	// remove no permission units
-	perm.units = make([]*repo_model.RepoUnit, 0, len(repo.Units))
-	for t := range perm.unitsMode {
-		for _, u := range repo.Units {
-			if u.Type == t {
-				perm.units = append(perm.units, u)
-			}
-		}
-	}
-
 	return perm, err
 }
 
@@ -50,7 +50,7 @@ func TestApplyEveryoneRepoPermission(t *testing.T) {
 			{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
 		},
 	}
-	applyEveryoneRepoPermission(nil, &perm)
+	finalProcessRepoUnitPermission(nil, &perm)
 	assert.False(t, perm.CanRead(unit.TypeWiki))
 
 	perm = Permission{
@@ -59,7 +59,7 @@ func TestApplyEveryoneRepoPermission(t *testing.T) {
 			{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
 		},
 	}
-	applyEveryoneRepoPermission(&user_model.User{ID: 0}, &perm)
+	finalProcessRepoUnitPermission(&user_model.User{ID: 0}, &perm)
 	assert.False(t, perm.CanRead(unit.TypeWiki))
 
 	perm = Permission{
@@ -68,7 +68,7 @@ func TestApplyEveryoneRepoPermission(t *testing.T) {
 			{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
 		},
 	}
-	applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm)
+	finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm)
 	assert.True(t, perm.CanRead(unit.TypeWiki))
 
 	perm = Permission{
@@ -77,20 +77,22 @@ func TestApplyEveryoneRepoPermission(t *testing.T) {
 			{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
 		},
 	}
-	applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm)
+	finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm)
 	// it should work the same as "EveryoneAccessMode: none" because the default AccessMode should be applied to units
 	assert.True(t, perm.CanWrite(unit.TypeWiki))
 
 	perm = Permission{
 		units: []*repo_model.RepoUnit{
+			{Type: unit.TypeCode}, // will be removed
 			{Type: unit.TypeWiki, EveryoneAccessMode: perm_model.AccessModeRead},
 		},
 		unitsMode: map[unit.Type]perm_model.AccessMode{
 			unit.TypeWiki: perm_model.AccessModeWrite,
 		},
 	}
-	applyEveryoneRepoPermission(&user_model.User{ID: 1}, &perm)
+	finalProcessRepoUnitPermission(&user_model.User{ID: 1}, &perm)
 	assert.True(t, perm.CanWrite(unit.TypeWiki))
+	assert.Len(t, perm.units, 1)
 }
 
 func TestUnitAccessMode(t *testing.T) {
@@ -48,6 +48,8 @@ type Column struct {
ProjectID int64 `xorm:"INDEX NOT NULL"`
CreatorID int64 `xorm:"NOT NULL"`

+NumIssues int64 `xorm:"-"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
}
@@ -57,20 +59,6 @@ func (Column) TableName() string {
return "project_board" // TODO: the legacy table name should be project_column
}

-// NumIssues return counter of all issues assigned to the column
-func (c *Column) NumIssues(ctx context.Context) int {
-total, err := db.GetEngine(ctx).Table("project_issue").
-Where("project_id=?", c.ProjectID).
-And("project_board_id=?", c.ID).
-GroupBy("issue_id").
-Cols("issue_id").
-Count()
-if err != nil {
-return 0
-}
-return int(total)
-}

func (c *Column) GetIssues(ctx context.Context) ([]*ProjectIssue, error) {
issues := make([]*ProjectIssue, 0, 5)
if err := db.GetEngine(ctx).Where("project_id=?", c.ProjectID).
@@ -192,7 +180,7 @@ func deleteColumnByID(ctx context.Context, columnID int64) error {
if err != nil {
return err
}
-defaultColumn, err := project.GetDefaultColumn(ctx)
+defaultColumn, err := project.MustDefaultColumn(ctx)
if err != nil {
return err
}
@@ -257,8 +245,8 @@ func (p *Project) GetColumns(ctx context.Context) (ColumnList, error) {
return columns, nil
}

-// GetDefaultColumn return default column and ensure only one exists
-func (p *Project) GetDefaultColumn(ctx context.Context) (*Column, error) {
+// getDefaultColumn return default column and ensure only one exists
+func (p *Project) getDefaultColumn(ctx context.Context) (*Column, error) {
var column Column
has, err := db.GetEngine(ctx).
Where("project_id=? AND `default` = ?", p.ID, true).
@@ -270,6 +258,33 @@ func (p *Project) GetDefaultColumn(ctx context.Context) (*Column, error) {
if has {
return &column, nil
}
+return nil, ErrProjectColumnNotExist{ColumnID: 0}
+}
+
+// MustDefaultColumn returns the default column for a project.
+// If one exists, it is returned
+// If none exists, the first column will be elevated to the default column of this project
+func (p *Project) MustDefaultColumn(ctx context.Context) (*Column, error) {
+c, err := p.getDefaultColumn(ctx)
+if err != nil && !IsErrProjectColumnNotExist(err) {
+return nil, err
+}
+if c != nil {
+return c, nil
+}
+
+var column Column
+has, err := db.GetEngine(ctx).Where("project_id=?", p.ID).OrderBy("sorting, id").Get(&column)
+if err != nil {
+return nil, err
+}
+if has {
+column.Default = true
+if _, err := db.GetEngine(ctx).ID(column.ID).Cols("`default`").Update(&column); err != nil {
+return nil, err
+}
+return &column, nil
+}

// create a default column if none is found
column = Column{
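As a brief aside, a minimal hypothetical sketch of how a caller inside the models/project package might use the renamed pair above: getDefaultColumn now only reports ErrProjectColumnNotExist, while MustDefaultColumn promotes or creates a column as needed. The helper name defaultColumnID below is made up purely for illustration.

    // defaultColumnID resolves the default column of a project, creating or
    // promoting one if the project has none yet.
    func defaultColumnID(ctx context.Context, p *Project) (int64, error) {
        col, err := p.MustDefaultColumn(ctx)
        if err != nil {
            return 0, err
        }
        return col.ID, nil
    }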
@@ -20,19 +20,19 @@ func TestGetDefaultColumn(t *testing.T) {
assert.NoError(t, err)

// check if default column was added
-column, err := projectWithoutDefault.GetDefaultColumn(db.DefaultContext)
+column, err := projectWithoutDefault.MustDefaultColumn(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, int64(5), column.ProjectID)
-assert.Equal(t, "Uncategorized", column.Title)
+assert.Equal(t, "Done", column.Title)

projectWithMultipleDefaults, err := GetProjectByID(db.DefaultContext, 6)
assert.NoError(t, err)

// check if multiple defaults were removed
-column, err = projectWithMultipleDefaults.GetDefaultColumn(db.DefaultContext)
+column, err = projectWithMultipleDefaults.MustDefaultColumn(db.DefaultContext)
assert.NoError(t, err)
assert.Equal(t, int64(6), column.ProjectID)
-assert.Equal(t, int64(9), column.ID)
+assert.Equal(t, int64(9), column.ID) // there are 2 default columns in the test data, use the latest one

// set 8 as default column
assert.NoError(t, SetDefaultColumn(db.DefaultContext, column.ProjectID, 8))
@@ -8,7 +8,6 @@ import (
"fmt"

"code.gitea.io/gitea/models/db"
-"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/util"
)

@@ -34,48 +33,6 @@ func deleteProjectIssuesByProjectID(ctx context.Context, projectID int64) error
return err
}

-// NumIssues return counter of all issues assigned to a project
-func (p *Project) NumIssues(ctx context.Context) int {
-c, err := db.GetEngine(ctx).Table("project_issue").
-Where("project_id=?", p.ID).
-GroupBy("issue_id").
-Cols("issue_id").
-Count()
-if err != nil {
-log.Error("NumIssues: %v", err)
-return 0
-}
-return int(c)
-}
-
-// NumClosedIssues return counter of closed issues assigned to a project
-func (p *Project) NumClosedIssues(ctx context.Context) int {
-c, err := db.GetEngine(ctx).Table("project_issue").
-Join("INNER", "issue", "project_issue.issue_id=issue.id").
-Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, true).
-Cols("issue_id").
-Count()
-if err != nil {
-log.Error("NumClosedIssues: %v", err)
-return 0
-}
-return int(c)
-}
-
-// NumOpenIssues return counter of open issues assigned to a project
-func (p *Project) NumOpenIssues(ctx context.Context) int {
-c, err := db.GetEngine(ctx).Table("project_issue").
-Join("INNER", "issue", "project_issue.issue_id=issue.id").
-Where("project_issue.project_id=? AND issue.is_closed=?", p.ID, false).
-Cols("issue_id").
-Count()
-if err != nil {
-log.Error("NumOpenIssues: %v", err)
-return 0
-}
-return int(c)
-}

func (c *Column) moveIssuesToAnotherColumn(ctx context.Context, newColumn *Column) error {
if c.ProjectID != newColumn.ProjectID {
return fmt.Errorf("columns have to be in the same project")
@@ -97,6 +97,9 @@ type Project struct {
Type Type

RenderedContent template.HTML `xorm:"-"`
+NumOpenIssues int64 `xorm:"-"`
+NumClosedIssues int64 `xorm:"-"`
+NumIssues int64 `xorm:"-"`

CreatedUnix timeutil.TimeStamp `xorm:"INDEX created"`
UpdatedUnix timeutil.TimeStamp `xorm:"INDEX updated"`
@@ -244,6 +247,10 @@ func GetSearchOrderByBySortType(sortType string) db.SearchOrderBy {
return db.SearchOrderByRecentUpdated
case "leastupdate":
return db.SearchOrderByLeastUpdated
+case "alphabetically":
+return "title ASC"
+case "reversealphabetically":
+return "title DESC"
default:
return db.SearchOrderByNewest
}
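As an aside, a small hypothetical sketch of how the two new sort keys could be consumed. db.SearchOrderBy is effectively a string type, so the returned value drops straight into an ORDER BY clause; the function name projectOrderBy below is illustrative only.

    // projectOrderBy maps a request-supplied sort key onto an ORDER BY string:
    // "alphabetically" -> "title ASC", "reversealphabetically" -> "title DESC",
    // anything unknown falls back to the "newest" ordering.
    func projectOrderBy(sortType string) string {
        return string(project.GetSearchOrderByBySortType(sortType))
    }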
@@ -56,16 +56,11 @@ func repoArchiverForRelativePath(relativePath string) (*RepoArchiver, error) {
if err != nil {
return nil, util.SilentWrap{Message: fmt.Sprintf("invalid storage path: %s", relativePath), Err: util.ErrInvalidArgument}
}
-nameExts := strings.SplitN(parts[2], ".", 2)
-if len(nameExts) != 2 {
+commitID, archiveType := git.SplitArchiveNameType(parts[2])
+if archiveType == git.ArchiveUnknown {
return nil, util.SilentWrap{Message: fmt.Sprintf("invalid storage path: %s", relativePath), Err: util.ErrInvalidArgument}
}
-return &RepoArchiver{
-RepoID: repoID,
-CommitID: parts[1] + nameExts[0],
-Type: git.ToArchiveType(nameExts[1]),
-}, nil
+return &RepoArchiver{RepoID: repoID, CommitID: commitID, Type: archiveType}, nil
}

// GetRepoArchiver get an archiver
@@ -54,6 +54,7 @@ func UpdateRepoLicenses(ctx context.Context, repo *Repository, commitID string,
for _, o := range oldLicenses {
// Update already existing license
if o.License == license {
+o.CommitID = commitID
if _, err := db.GetEngine(ctx).ID(o.ID).Cols("`commit_id`").Update(o); err != nil {
return err
}
@@ -14,6 +14,7 @@ import (
"regexp"
"strconv"
"strings"
+"sync"

"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unit"
@@ -61,20 +62,30 @@ func (err ErrRepoIsArchived) Error() string {
return fmt.Sprintf("%s is archived", err.Repo.LogString())
}

-var (
-validRepoNamePattern = regexp.MustCompile(`[-.\w]+`)
-invalidRepoNamePattern = regexp.MustCompile(`[.]{2,}`)
-reservedRepoNames = []string{".", "..", "-"}
-reservedRepoPatterns = []string{"*.git", "*.wiki", "*.rss", "*.atom"}
-)
+type globalVarsStruct struct {
+validRepoNamePattern *regexp.Regexp
+invalidRepoNamePattern *regexp.Regexp
+reservedRepoNames []string
+reservedRepoPatterns []string
+}
+
+var globalVars = sync.OnceValue(func() *globalVarsStruct {
+return &globalVarsStruct{
+validRepoNamePattern: regexp.MustCompile(`[-.\w]+`),
+invalidRepoNamePattern: regexp.MustCompile(`[.]{2,}`),
+reservedRepoNames: []string{".", "..", "-"},
+reservedRepoPatterns: []string{"*.git", "*.wiki", "*.rss", "*.atom"},
+}
+})

// IsUsableRepoName returns true when name is usable
func IsUsableRepoName(name string) error {
-if !validRepoNamePattern.MatchString(name) || invalidRepoNamePattern.MatchString(name) {
+vars := globalVars()
+if !vars.validRepoNamePattern.MatchString(name) || vars.invalidRepoNamePattern.MatchString(name) {
// Note: usually this error is normally caught up earlier in the UI
return db.ErrNameCharsNotAllowed{Name: name}
}
-return db.IsUsableName(reservedRepoNames, reservedRepoPatterns, name)
+return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoPatterns, name)
}

// TrustModelType defines the types of trust model for this repository
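A side note on the initialization pattern introduced above: sync.OnceValue (Go 1.21+) wraps a constructor so the package-level regexps and lists are built lazily on first use and then cached for every later call. A self-contained sketch of the same idiom, with made-up names:

    package main

    import (
        "fmt"
        "regexp"
        "sync"
    )

    type vars struct {
        namePattern *regexp.Regexp
    }

    // built once, on the first call to getVars(); later calls return the cached value
    var getVars = sync.OnceValue(func() *vars {
        return &vars{namePattern: regexp.MustCompile(`[-.\w]+`)}
    })

    func main() {
        fmt.Println(getVars().namePattern.MatchString("my-repo")) // true
    }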
@@ -219,4 +219,5 @@ func TestIsUsableRepoName(t *testing.T) {
assert.Error(t, IsUsableRepoName("the..repo"))
assert.Error(t, IsUsableRepoName("foo.wiki"))
assert.Error(t, IsUsableRepoName("foo.git"))
+assert.Error(t, IsUsableRepoName("foo.RSS"))
}
@@ -68,6 +68,7 @@ type RepoTransfer struct { //nolint
RecipientID int64
Recipient *user_model.User `xorm:"-"`
RepoID int64
+Repo *Repository `xorm:"-"`
TeamIDs []int64
Teams []*organization.Team `xorm:"-"`

@@ -79,48 +80,65 @@ func init() {
db.RegisterModel(new(RepoTransfer))
}

-// LoadAttributes fetches the transfer recipient from the database
-func (r *RepoTransfer) LoadAttributes(ctx context.Context) error {
+func (r *RepoTransfer) LoadRecipient(ctx context.Context) error {
if r.Recipient == nil {
u, err := user_model.GetUserByID(ctx, r.RecipientID)
if err != nil {
return err
}

r.Recipient = u
}

-if r.Recipient.IsOrganization() && len(r.TeamIDs) != len(r.Teams) {
-for _, v := range r.TeamIDs {
-team, err := organization.GetTeamByID(ctx, v)
-if err != nil {
-return err
-}
-if team.OrgID != r.Recipient.ID {
-return fmt.Errorf("team %d belongs not to org %d", v, r.Recipient.ID)
-}
+return nil
+}
+
+func (r *RepoTransfer) LoadRepo(ctx context.Context) error {
+if r.Repo == nil {
+repo, err := GetRepositoryByID(ctx, r.RepoID)
+if err != nil {
+return err
+}
+r.Repo = repo
+}
+
+return nil
+}
+
+// LoadAttributes fetches the transfer recipient from the database
+func (r *RepoTransfer) LoadAttributes(ctx context.Context) error {
+if err := r.LoadRecipient(ctx); err != nil {
+return err
+}
+
+if r.Recipient.IsOrganization() && r.Teams == nil {
+teamsMap, err := organization.GetTeamsByIDs(ctx, r.TeamIDs)
+if err != nil {
+return err
+}
+for _, team := range teamsMap {
r.Teams = append(r.Teams, team)
}
}

+if err := r.LoadRepo(ctx); err != nil {
+return err
+}
+
if r.Doer == nil {
u, err := user_model.GetUserByID(ctx, r.DoerID)
if err != nil {
return err
}

r.Doer = u
}

return nil
}

-// CanUserAcceptTransfer checks if the user has the rights to accept/decline a repo transfer.
+// CanUserAcceptOrRejectTransfer checks if the user has the rights to accept/decline a repo transfer.
// For user, it checks if it's himself
// For organizations, it checks if the user is able to create repos
-func (r *RepoTransfer) CanUserAcceptTransfer(ctx context.Context, u *user_model.User) bool {
+func (r *RepoTransfer) CanUserAcceptOrRejectTransfer(ctx context.Context, u *user_model.User) bool {
if err := r.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return false
|
|||||||
Find(&transfers)
|
Find(&transfers)
|
||||||
}
|
}
|
||||||
|
|
||||||
|
func IsRepositoryTransferExist(ctx context.Context, repoID int64) (bool, error) {
|
||||||
|
return db.GetEngine(ctx).Where("repo_id = ?", repoID).Exist(new(RepoTransfer))
|
||||||
|
}
|
||||||
|
|
||||||
// GetPendingRepositoryTransfer fetches the most recent and ongoing transfer
|
// GetPendingRepositoryTransfer fetches the most recent and ongoing transfer
|
||||||
// process for the repository
|
// process for the repository
|
||||||
func GetPendingRepositoryTransfer(ctx context.Context, repo *Repository) (*RepoTransfer, error) {
|
func GetPendingRepositoryTransfer(ctx context.Context, repo *Repository) (*RepoTransfer, error) {
|
||||||
@ -206,11 +228,26 @@ func CreatePendingRepositoryTransfer(ctx context.Context, doer, newOwner *user_m
|
|||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
if _, err := user_model.GetUserByID(ctx, newOwner.ID); err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
|
||||||
// Make sure repo is ready to transfer
|
// Make sure repo is ready to transfer
|
||||||
if err := TestRepositoryReadyForTransfer(repo.Status); err != nil {
|
if err := TestRepositoryReadyForTransfer(repo.Status); err != nil {
|
||||||
return err
|
return err
|
||||||
}
|
}
|
||||||
|
|
||||||
|
exist, err := IsRepositoryTransferExist(ctx, repo.ID)
|
||||||
|
if err != nil {
|
||||||
|
return err
|
||||||
|
}
|
||||||
|
if exist {
|
||||||
|
return ErrRepoTransferInProgress{
|
||||||
|
Uname: repo.Owner.LowerName,
|
||||||
|
Name: repo.Name,
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
repo.Status = RepositoryPendingTransfer
|
repo.Status = RepositoryPendingTransfer
|
||||||
if err := UpdateRepositoryCols(ctx, repo, "status"); err != nil {
|
if err := UpdateRepositoryCols(ctx, repo, "status"); err != nil {
|
||||||
return err
|
return err
|
||||||
|
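A minimal hypothetical sketch of the kind of pre-check a service-layer caller could perform with the new helper, assuming this package is imported as repo_model and repo.Owner has already been loaded:

    // skip early when another transfer is already pending for this repository
    exist, err := repo_model.IsRepositoryTransferExist(ctx, repo.ID)
    if err != nil {
        return err
    }
    if exist {
        // the same condition CreatePendingRepositoryTransfer now reports itself
        return repo_model.ErrRepoTransferInProgress{Uname: repo.Owner.LowerName, Name: repo.Name}
    }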
@@ -45,8 +45,6 @@ func TestCreateRepositoryNotice(t *testing.T) {
unittest.AssertExistsAndLoadBean(t, noticeBean)
}

-// TODO TestRemoveAllWithNotice

func TestCountNotices(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
assert.Equal(t, int64(3), system.CountNotices(db.DefaultContext))
@@ -11,35 +11,13 @@ import (
"code.gitea.io/gitea/modules/util"
)

-// Copy copies file from source to target path.
-func Copy(src, dest string) error {
-// Gather file information to set back later.
-si, err := os.Lstat(src)
-if err != nil {
-return err
-}
-
-// Handle symbolic link.
-if si.Mode()&os.ModeSymlink != 0 {
-target, err := os.Readlink(src)
-if err != nil {
-return err
-}
-// NOTE: os.Chmod and os.Chtimes don't recognize symbolic link,
-// which will lead "no such file or directory" error.
-return os.Symlink(target, dest)
-}
-
-return util.CopyFile(src, dest)
-}
-
-// Sync synchronizes the two files. This is skipped if both files
+// SyncFile synchronizes the two files. This is skipped if both files
// exist and the size, modtime, and mode match.
-func Sync(srcPath, destPath string) error {
+func SyncFile(srcPath, destPath string) error {
dest, err := os.Stat(destPath)
if err != nil {
if os.IsNotExist(err) {
-return Copy(srcPath, destPath)
+return util.CopyFile(srcPath, destPath)
}
return err
}
@@ -55,7 +33,7 @@ func Sync(srcPath, destPath string) error {
return nil
}

-return Copy(srcPath, destPath)
+return util.CopyFile(srcPath, destPath)
}

// SyncDirs synchronizes files recursively from source to target directory.
@@ -66,6 +44,10 @@ func SyncDirs(srcPath, destPath string) error {
return err
}

+// the keep file is used to keep the directory in a git repository, it doesn't need to be synced
+// and go-git doesn't work with the ".keep" file (it would report errors like "ref is empty")
+const keepFile = ".keep"
+
// find and delete all untracked files
destFiles, err := util.ListDirRecursively(destPath, &util.ListDirOptions{IncludeDir: true})
if err != nil {
@@ -73,16 +55,20 @@ func SyncDirs(srcPath, destPath string) error {
}
for _, destFile := range destFiles {
destFilePath := filepath.Join(destPath, destFile)
+shouldRemove := filepath.Base(destFilePath) == keepFile
if _, err = os.Stat(filepath.Join(srcPath, destFile)); err != nil {
if os.IsNotExist(err) {
-// if src file does not exist, remove dest file
-if err = os.RemoveAll(destFilePath); err != nil {
-return err
-}
+shouldRemove = true
} else {
return err
}
}
+// if src file does not exist, remove dest file
+if shouldRemove {
+if err = os.RemoveAll(destFilePath); err != nil {
+return err
+}
+}
}

// sync src files to dest
@@ -95,8 +81,8 @@ func SyncDirs(srcPath, destPath string) error {
// util.ListDirRecursively appends a slash to the directory name
if strings.HasSuffix(srcFile, "/") {
err = os.MkdirAll(destFilePath, os.ModePerm)
-} else {
-err = Sync(filepath.Join(srcPath, srcFile), destFilePath)
+} else if filepath.Base(destFilePath) != keepFile {
+err = SyncFile(filepath.Join(srcPath, srcFile), destFilePath)
}
if err != nil {
return err
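As a brief aside, a hypothetical call site in the same package: with the change above, unchanged files are skipped by SyncFile and ".keep" placeholders in the destination are pruned rather than copied. The srcRoot and dataRoot variables below are made up for illustration.

    // mirror bundled templates into the data directory, copying only what changed
    if err := SyncDirs(filepath.Join(srcRoot, "templates"), filepath.Join(dataRoot, "templates")); err != nil {
        return err
    }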
@@ -38,27 +38,30 @@ func GenerateRandomAvatar(ctx context.Context, u *User) error {

u.Avatar = avatars.HashEmail(seed)

-// Don't share the images so that we can delete them easily
-if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
-if err := png.Encode(w, img); err != nil {
-log.Error("Encode: %v", err)
-}
-return err
-}); err != nil {
-return fmt.Errorf("Failed to create dir %s: %w", u.CustomAvatarRelativePath(), err)
+_, err = storage.Avatars.Stat(u.CustomAvatarRelativePath())
+if err != nil {
+// If unable to Stat the avatar file (usually it means non-existing), then try to save a new one
+// Don't share the images so that we can delete them easily
+if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
+if err := png.Encode(w, img); err != nil {
+log.Error("Encode: %v", err)
+}
+return nil
+}); err != nil {
+return fmt.Errorf("failed to save avatar %s: %w", u.CustomAvatarRelativePath(), err)
+}
}

if _, err := db.GetEngine(ctx).ID(u.ID).Cols("avatar").Update(u); err != nil {
return err
}

-log.Info("New random avatar created: %d", u.ID)
return nil
}

// AvatarLinkWithSize returns a link to the user's avatar with size. size <= 0 means default size
func (u *User) AvatarLinkWithSize(ctx context.Context, size int) string {
-if u.IsGhost() {
+if u.IsGhost() || u.IsGiteaActions() {
return avatars.DefaultAvatarLink()
}

@@ -4,13 +4,19 @@
package user

import (
+"context"
+"io"
+"strings"
"testing"

"code.gitea.io/gitea/models/db"
+"code.gitea.io/gitea/models/unittest"
"code.gitea.io/gitea/modules/setting"
+"code.gitea.io/gitea/modules/storage"
"code.gitea.io/gitea/modules/test"

"github.com/stretchr/testify/assert"
+"github.com/stretchr/testify/require"
)

func TestUserAvatarLink(t *testing.T) {
@@ -26,3 +32,37 @@ func TestUserAvatarLink(t *testing.T) {
link = u.AvatarLink(db.DefaultContext)
assert.Equal(t, "https://localhost/sub-path/avatars/avatar.png", link)
}

+func TestUserAvatarGenerate(t *testing.T) {
+assert.NoError(t, unittest.PrepareTestDatabase())
+var err error
+tmpDir := t.TempDir()
+storage.Avatars, err = storage.NewLocalStorage(context.Background(), &setting.Storage{Path: tmpDir})
+require.NoError(t, err)
+
+u := unittest.AssertExistsAndLoadBean(t, &User{ID: 2})
+
+// there was no avatar, generate a new one
+assert.Empty(t, u.Avatar)
+err = GenerateRandomAvatar(db.DefaultContext, u)
+require.NoError(t, err)
+assert.NotEmpty(t, u.Avatar)
+
+// make sure the generated one exists
+oldAvatarPath := u.CustomAvatarRelativePath()
+_, err = storage.Avatars.Stat(u.CustomAvatarRelativePath())
+require.NoError(t, err)
+// and try to change its content
+_, err = storage.Avatars.Save(u.CustomAvatarRelativePath(), strings.NewReader("abcd"), 4)
+require.NoError(t, err)
+
+// try to generate again
+err = GenerateRandomAvatar(db.DefaultContext, u)
+require.NoError(t, err)
+assert.Equal(t, oldAvatarPath, u.CustomAvatarRelativePath())
+f, err := storage.Avatars.Open(u.CustomAvatarRelativePath())
+require.NoError(t, err)
+defer f.Close()
+content, _ := io.ReadAll(f)
+assert.Equal(t, "abcd", string(content))
+}
@@ -8,7 +8,6 @@ import (
"context"
"fmt"
"net/mail"
-"regexp"
"strings"
"time"

@@ -153,8 +152,6 @@ func UpdateEmailAddress(ctx context.Context, email *EmailAddress) error {
return err
}

-var emailRegexp = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$")

// ValidateEmail check if email is a valid & allowed address
func ValidateEmail(email string) error {
if err := validateEmailBasic(email); err != nil {
@@ -514,7 +511,7 @@ func validateEmailBasic(email string) error {
return ErrEmailInvalid{email}
}

-if !emailRegexp.MatchString(email) {
+if !globalVars().emailRegexp.MatchString(email) {
return ErrEmailCharIsNotSupported{email}
}

@@ -545,3 +542,13 @@ func IsEmailDomainAllowed(email string) bool {

return validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email)
}

+func GetActivatedEmailAddresses(ctx context.Context, uid int64) ([]string, error) {
+emails := make([]string, 0, 2)
+if err := db.GetEngine(ctx).Table("email_address").Select("email").
+Where("uid=? AND is_activated=?", uid, true).Asc("id").
+Find(&emails); err != nil {
+return nil, err
+}
+return emails, nil
+}
@@ -11,9 +11,6 @@ import (
"code.gitea.io/gitea/modules/util"
)

-// ErrOpenIDNotExist openid is not known
-var ErrOpenIDNotExist = util.NewNotExistErrorf("OpenID is unknown")

// UserOpenID is the list of all OpenID identities of a user.
// Since this is a middle table, name it OpenID is not suitable, so we ignore the lint here
type UserOpenID struct { //revive:disable-line:exported
@@ -99,7 +96,7 @@ func DeleteUserOpenID(ctx context.Context, openid *UserOpenID) (err error) {
if err != nil {
return err
} else if deleted != 1 {
-return ErrOpenIDNotExist
+return util.NewNotExistErrorf("OpenID is unknown")
}
return nil
}
@@ -14,6 +14,7 @@ import (
"path/filepath"
"regexp"
"strings"
+"sync"
"time"
"unicode"

@@ -213,7 +214,7 @@ func (u *User) GetPlaceholderEmail() string {
return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress)
}

-// GetEmail returns an noreply email, if the user has set to keep his
+// GetEmail returns a noreply email, if the user has set to keep his
// email address private, otherwise the primary email address.
func (u *User) GetEmail() string {
if u.KeepEmailPrivate {
@@ -384,11 +385,12 @@ func (u *User) ValidatePassword(passwd string) bool {
}

// IsPasswordSet checks if the password is set or left empty
+// TODO: It's better to clarify the "password" behavior for different types (individual, bot)
func (u *User) IsPasswordSet() bool {
-return len(u.Passwd) != 0
+return u.Passwd != ""
}

-// IsOrganization returns true if user is actually a organization.
+// IsOrganization returns true if user is actually an organization.
func (u *User) IsOrganization() bool {
return u.Type == UserTypeOrganization
}
@@ -398,13 +400,14 @@ func (u *User) IsIndividual() bool {
return u.Type == UserTypeIndividual
}

-func (u *User) IsUser() bool {
-return u.Type == UserTypeIndividual || u.Type == UserTypeBot
+// IsTypeBot returns whether the user is of type bot
+func (u *User) IsTypeBot() bool {
+return u.Type == UserTypeBot
}

-// IsBot returns whether or not the user is of type bot
-func (u *User) IsBot() bool {
-return u.Type == UserTypeBot
+// IsTokenAccessAllowed returns whether the user is an individual or a bot (which allows for token access)
+func (u *User) IsTokenAccessAllowed() bool {
+return u.Type == UserTypeIndividual || u.Type == UserTypeBot
}

// DisplayName returns full name if it's not empty,
@@ -417,19 +420,9 @@ func (u *User) DisplayName() string {
return u.Name
}

-var emailToReplacer = strings.NewReplacer(
-"\n", "",
-"\r", "",
-"<", "",
-">", "",
-",", "",
-":", "",
-";", "",
-)

// EmailTo returns a string suitable to be put into a e-mail `To:` header.
func (u *User) EmailTo() string {
-sanitizedDisplayName := emailToReplacer.Replace(u.DisplayName())
+sanitizedDisplayName := globalVars().emailToReplacer.Replace(u.DisplayName())

// should be an edge case but nice to have
if sanitizedDisplayName == u.Email {
@@ -502,10 +495,10 @@ func (u *User) IsMailable() bool {
return u.IsActive
}

-// IsUserExist checks if given user name exist,
-// the user name should be noncased unique.
+// IsUserExist checks if given username exist,
+// the username should be non-cased unique.
// If uid is presented, then check will rule out that one,
-// it is used when update a user name in settings page.
+// it is used when update a username in settings page.
func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) {
if len(name) == 0 {
return false, nil
@@ -515,7 +508,7 @@ func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) {
Get(&User{LowerName: strings.ToLower(name)})
}

-// Note: As of the beginning of 2022, it is recommended to use at least
+// SaltByteLength as of the beginning of 2022, it is recommended to use at least
// 64 bits of salt, but NIST is already recommending to use to 128 bits.
// (16 bytes = 16 * 8 = 128 bits)
const SaltByteLength = 16
@@ -526,28 +519,58 @@ func GetUserSalt() (string, error) {
if err != nil {
return "", err
}
-// Returns a 32 bytes long string.
+// Returns a 32-byte long string.
return hex.EncodeToString(rBytes), nil
}

-// Note: The set of characters here can safely expand without a breaking change,
-// but characters removed from this set can cause user account linking to break
-var (
-customCharsReplacement = strings.NewReplacer("Æ", "AE")
-removeCharsRE = regexp.MustCompile("['`´]")
-transformDiacritics = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
-replaceCharsHyphenRE = regexp.MustCompile(`[\s~+]`)
-)
+type globalVarsStruct struct {
+customCharsReplacement *strings.Replacer
+removeCharsRE *regexp.Regexp
+transformDiacritics transform.Transformer
+replaceCharsHyphenRE *regexp.Regexp
+emailToReplacer *strings.Replacer
+emailRegexp *regexp.Regexp
+systemUserNewFuncs map[int64]func() *User
+}

+var globalVars = sync.OnceValue(func() *globalVarsStruct {
+return &globalVarsStruct{
+// Note: The set of characters here can safely expand without a breaking change,
+// but characters removed from this set can cause user account linking to break
+customCharsReplacement: strings.NewReplacer("Æ", "AE"),
+
+removeCharsRE: regexp.MustCompile("['`´]"),
+transformDiacritics: transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC),
+replaceCharsHyphenRE: regexp.MustCompile(`[\s~+]`),
+
+emailToReplacer: strings.NewReplacer(
+"\n", "",
+"\r", "",
+"<", "",
+">", "",
+",", "",
+":", "",
+";", "",
+),
+emailRegexp: regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$"),
+
+systemUserNewFuncs: map[int64]func() *User{
+GhostUserID: NewGhostUser,
+ActionsUserID: NewActionsUser,
+},
+}
+})

// NormalizeUserName only takes the name part if it is an email address, transforms it diacritics to ASCII characters.
// It returns a string with the single-quotes removed, and any other non-supported username characters are replaced with a `-` character
func NormalizeUserName(s string) (string, error) {
+vars := globalVars()
s, _, _ = strings.Cut(s, "@")
-strDiacriticsRemoved, n, err := transform.String(transformDiacritics, customCharsReplacement.Replace(s))
+strDiacriticsRemoved, n, err := transform.String(vars.transformDiacritics, vars.customCharsReplacement.Replace(s))
if err != nil {
return "", fmt.Errorf("failed to normalize the string of provided username %q at position %d", s, n)
}
-return replaceCharsHyphenRE.ReplaceAllLiteralString(removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil
+return vars.replaceCharsHyphenRE.ReplaceAllLiteralString(vars.removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil
}

var (
|
|||||||
return users, err
|
return users, err
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetPossibleUserByID returns the user if id > 0 or return system usrs if id < 0
|
// GetPossibleUserByID returns the user if id > 0 or returns system user if id < 0
|
||||||
func GetPossibleUserByID(ctx context.Context, id int64) (*User, error) {
|
func GetPossibleUserByID(ctx context.Context, id int64) (*User, error) {
|
||||||
switch id {
|
if id < 0 {
|
||||||
case GhostUserID:
|
if newFunc, ok := globalVars().systemUserNewFuncs[id]; ok {
|
||||||
return NewGhostUser(), nil
|
return newFunc(), nil
|
||||||
case ActionsUserID:
|
}
|
||||||
return NewActionsUser(), nil
|
return nil, ErrUserNotExist{UID: id}
|
||||||
case 0:
|
} else if id == 0 {
|
||||||
return nil, ErrUserNotExist{}
|
return nil, ErrUserNotExist{}
|
||||||
default:
|
|
||||||
return GetUserByID(ctx, id)
|
|
||||||
}
|
}
|
||||||
|
return GetUserByID(ctx, id)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetPossibleUserByIDs returns the users if id > 0 or return system users if id < 0
|
// GetPossibleUserByIDs returns the users if id > 0 or returns system users if id < 0
|
||||||
func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
||||||
uniqueIDs := container.SetOf(ids...)
|
uniqueIDs := container.SetOf(ids...)
|
||||||
users := make([]*User, 0, len(ids))
|
users := make([]*User, 0, len(ids))
|
||||||
_ = uniqueIDs.Remove(0)
|
_ = uniqueIDs.Remove(0)
|
||||||
if uniqueIDs.Remove(GhostUserID) {
|
for systemUID, newFunc := range globalVars().systemUserNewFuncs {
|
||||||
users = append(users, NewGhostUser())
|
if uniqueIDs.Remove(systemUID) {
|
||||||
}
|
users = append(users, newFunc())
|
||||||
if uniqueIDs.Remove(ActionsUserID) {
|
}
|
||||||
users = append(users, NewActionsUser())
|
|
||||||
}
|
}
|
||||||
res, err := GetUserByIDs(ctx, uniqueIDs.Values())
|
res, err := GetUserByIDs(ctx, uniqueIDs.Values())
|
||||||
if err != nil {
|
if err != nil {
|
||||||
@ -996,7 +1017,7 @@ func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
|||||||
return users, nil
|
return users, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetUserByNameCtx returns user by given name.
|
// GetUserByName returns user by given name.
|
||||||
func GetUserByName(ctx context.Context, name string) (*User, error) {
|
func GetUserByName(ctx context.Context, name string) (*User, error) {
|
||||||
if len(name) == 0 {
|
if len(name) == 0 {
|
||||||
return nil, ErrUserNotExist{Name: name}
|
return nil, ErrUserNotExist{Name: name}
|
||||||
@ -1027,8 +1048,8 @@ func GetUserEmailsByNames(ctx context.Context, names []string) []string {
|
|||||||
return mails
|
return mails
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetMaileableUsersByIDs gets users from ids, but only if they can receive mails
|
// GetMailableUsersByIDs gets users from ids, but only if they can receive mails
|
||||||
func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) {
|
func GetMailableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) {
|
||||||
if len(ids) == 0 {
|
if len(ids) == 0 {
|
||||||
return nil, nil
|
return nil, nil
|
||||||
}
|
}
|
||||||
@ -1053,17 +1074,6 @@ func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([
|
|||||||
Find(&ous)
|
Find(&ous)
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetUserNamesByIDs returns usernames for all resolved users from a list of Ids.
|
|
||||||
func GetUserNamesByIDs(ctx context.Context, ids []int64) ([]string, error) {
|
|
||||||
unames := make([]string, 0, len(ids))
|
|
||||||
err := db.GetEngine(ctx).In("id", ids).
|
|
||||||
Table("user").
|
|
||||||
Asc("name").
|
|
||||||
Cols("name").
|
|
||||||
Find(&unames)
|
|
||||||
return unames, err
|
|
||||||
}
|
|
||||||
|
|
||||||
// GetUserNameByID returns username for the id
|
// GetUserNameByID returns username for the id
|
||||||
func GetUserNameByID(ctx context.Context, id int64) (string, error) {
|
func GetUserNameByID(ctx context.Context, id int64) (string, error) {
|
||||||
var name string
|
var name string
|
||||||
@ -1119,28 +1129,89 @@ func ValidateCommitWithEmail(ctx context.Context, c *git.Commit) *User {
|
|||||||
}
|
}
|
||||||
|
|
||||||
// ValidateCommitsWithEmails checks if authors' e-mails of commits are corresponding to users.
|
// ValidateCommitsWithEmails checks if authors' e-mails of commits are corresponding to users.
|
||||||
func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) []*UserCommit {
|
func ValidateCommitsWithEmails(ctx context.Context, oldCommits []*git.Commit) ([]*UserCommit, error) {
|
||||||
var (
|
var (
|
||||||
emails = make(map[string]*User)
|
|
||||||
newCommits = make([]*UserCommit, 0, len(oldCommits))
|
newCommits = make([]*UserCommit, 0, len(oldCommits))
|
||||||
|
emailSet = make(container.Set[string])
|
||||||
)
|
)
|
||||||
for _, c := range oldCommits {
|
for _, c := range oldCommits {
|
||||||
var u *User
|
|
||||||
if c.Author != nil {
|
if c.Author != nil {
|
||||||
if v, ok := emails[c.Author.Email]; !ok {
|
emailSet.Add(c.Author.Email)
|
||||||
u, _ = GetUserByEmail(ctx, c.Author.Email)
|
}
|
||||||
emails[c.Author.Email] = u
|
}
|
||||||
} else {
|
|
||||||
u = v
|
emailUserMap, err := GetUsersByEmails(ctx, emailSet.Values())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
for _, c := range oldCommits {
|
||||||
|
user, ok := emailUserMap[c.Author.Email]
|
||||||
|
if !ok {
|
||||||
|
user = &User{
|
||||||
|
Name: c.Author.Name,
|
||||||
|
Email: c.Author.Email,
|
||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
newCommits = append(newCommits, &UserCommit{
|
newCommits = append(newCommits, &UserCommit{
|
||||||
User: u,
|
User: user,
|
||||||
Commit: c,
|
Commit: c,
|
||||||
})
|
})
|
||||||
}
|
}
|
||||||
return newCommits
|
return newCommits, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
func GetUsersByEmails(ctx context.Context, emails []string) (map[string]*User, error) {
|
||||||
|
if len(emails) == 0 {
|
||||||
|
return nil, nil
|
||||||
|
}
|
||||||
|
|
||||||
|
needCheckEmails := make(container.Set[string])
|
||||||
|
needCheckUserNames := make(container.Set[string])
|
||||||
|
for _, email := range emails {
|
||||||
|
if strings.HasSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress)) {
|
||||||
|
username := strings.TrimSuffix(email, fmt.Sprintf("@%s", setting.Service.NoReplyAddress))
|
||||||
|
needCheckUserNames.Add(username)
|
||||||
|
} else {
|
||||||
|
needCheckEmails.Add(strings.ToLower(email))
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
emailAddresses := make([]*EmailAddress, 0, len(needCheckEmails))
|
||||||
|
if err := db.GetEngine(ctx).In("lower_email", needCheckEmails.Values()).
|
||||||
|
And("is_activated=?", true).
|
||||||
|
Find(&emailAddresses); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
userIDs := make(container.Set[int64])
|
||||||
|
for _, email := range emailAddresses {
|
||||||
|
userIDs.Add(email.UID)
|
||||||
|
}
|
||||||
|
users, err := GetUsersMapByIDs(ctx, userIDs.Values())
|
||||||
|
if err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
|
||||||
|
results := make(map[string]*User, len(emails))
|
||||||
|
for _, email := range emailAddresses {
|
||||||
|
user := users[email.UID]
|
||||||
|
if user != nil {
|
||||||
|
if user.KeepEmailPrivate {
|
||||||
|
results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
|
||||||
|
} else {
|
||||||
|
results[email.Email] = user
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
users = make(map[int64]*User, len(needCheckUserNames))
|
||||||
|
if err := db.GetEngine(ctx).In("lower_name", needCheckUserNames.Values()).Find(&users); err != nil {
|
||||||
|
return nil, err
|
||||||
|
}
|
||||||
|
for _, user := range users {
|
||||||
|
results[user.LowerName+"@"+setting.Service.NoReplyAddress] = user
|
||||||
|
}
|
||||||
|
return results, nil
|
||||||
}
|
}
|
||||||
|
|
||||||
// GetUserByEmail returns the user object by given e-mail if exists.
|
// GetUserByEmail returns the user object by given e-mail if exists.
|
||||||
|
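A minimal hypothetical sketch of a caller adapted to the new signature, assuming the usual user_model import alias and a commits slice coming from the git layer; authors without a matching account come back as placeholder users rather than nil:

    userCommits, err := user_model.ValidateCommitsWithEmails(ctx, commits)
    if err != nil {
        return err
    }
    for _, uc := range userCommits {
        // uc.User is either a matched account or a placeholder built from the commit author
        log.Trace("commit author resolved to %s", uc.User.Name)
    }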
47 models/user/user_list.go Normal file
@@ -0,0 +1,47 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+"context"
+
+"code.gitea.io/gitea/models/db"
+)
+
+func GetUsersMapByIDs(ctx context.Context, userIDs []int64) (map[int64]*User, error) {
+userMaps := make(map[int64]*User, len(userIDs))
+left := len(userIDs)
+for left > 0 {
+limit := db.DefaultMaxInSize
+if left < limit {
+limit = left
+}
+err := db.GetEngine(ctx).
+In("id", userIDs[:limit]).
+Find(&userMaps)
+if err != nil {
+return nil, err
+}
+left -= limit
+userIDs = userIDs[limit:]
+}
+return userMaps, nil
+}
+
+func GetPossibleUserFromMap(userID int64, usererMaps map[int64]*User) *User {
+switch userID {
+case GhostUserID:
+return NewGhostUser()
+case ActionsUserID:
+return NewActionsUser()
+case 0:
+return nil
+default:
+user, ok := usererMaps[userID]
+if !ok {
+return NewGhostUser()
+}
+return user
+}
+}
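A short hypothetical usage sketch for the two helpers above, assuming user_model is the import alias and posterIDs is a slice of user IDs gathered elsewhere:

    userMap, err := user_model.GetUsersMapByIDs(ctx, posterIDs)
    if err != nil {
        return err
    }
    for _, id := range posterIDs {
        // deleted accounts resolve to the Ghost user; id 0 yields nil
        if u := user_model.GetPossibleUserFromMap(id, userMap); u != nil {
            log.Trace("user %d resolved to %s", id, u.Name)
        }
    }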
@@ -10,9 +10,8 @@ import (
)

const (
GhostUserID = -1
GhostUserName = "Ghost"
-GhostUserLowerName = "ghost"
)

// NewGhostUser creates and returns a fake user for someone has deleted their account.
@@ -20,10 +19,14 @@ func NewGhostUser() *User {
return &User{
ID: GhostUserID,
Name: GhostUserName,
-LowerName: GhostUserLowerName,
+LowerName: strings.ToLower(GhostUserName),
}
}

+func IsGhostUserName(name string) bool {
+return strings.EqualFold(name, GhostUserName)
+}
+
// IsGhost check if user is fake user for a deleted account
func (u *User) IsGhost() bool {
if u == nil {
@@ -32,22 +35,16 @@ func (u *User) IsGhost() bool {
return u.ID == GhostUserID && u.Name == GhostUserName
}

-// NewReplaceUser creates and returns a fake user for external user
-func NewReplaceUser(name string) *User {
-return &User{
-ID: 0,
-Name: name,
-LowerName: strings.ToLower(name),
-}
-}

const (
ActionsUserID = -2
ActionsUserName = "gitea-actions"
-ActionsFullName = "Gitea Actions"
-ActionsEmail = "teabot@gitea.io"
+ActionsUserEmail = "teabot@gitea.io"
)

+func IsGiteaActionsUserName(name string) bool {
+return strings.EqualFold(name, ActionsUserName)
+}
+
// NewActionsUser creates and returns a fake user for running the actions.
func NewActionsUser() *User {
return &User{
@@ -55,16 +52,26 @@ func NewActionsUser() *User {
Name: ActionsUserName,
LowerName: ActionsUserName,
IsActive: true,
-FullName: ActionsFullName,
-Email: ActionsEmail,
+FullName: "Gitea Actions",
+Email: ActionsUserEmail,
KeepEmailPrivate: true,
LoginName: ActionsUserName,
-Type: UserTypeIndividual,
+Type: UserTypeBot,
AllowCreateOrganization: true,
Visibility: structs.VisibleTypePublic,
}
}

-func (u *User) IsActions() bool {
+func (u *User) IsGiteaActions() bool {
return u != nil && u.ID == ActionsUserID
}

+func GetSystemUserByName(name string) *User {
+if IsGhostUserName(name) {
+return NewGhostUser()
+}
+if IsGiteaActionsUserName(name) {
+return NewActionsUser()
+}
+return nil
+}
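A small hypothetical sketch of how the reserved-name helpers above might be consulted before a normal database lookup, assuming the user_model alias:

    // try the reserved system accounts first, then fall back to the database
    if su := user_model.GetSystemUserByName(name); su != nil {
        return su, nil
    }
    return user_model.GetUserByName(ctx, name)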
32 models/user/user_system_test.go Normal file
@@ -0,0 +1,32 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package user
+
+import (
+"testing"
+
+"code.gitea.io/gitea/models/db"
+
+"github.com/stretchr/testify/assert"
+"github.com/stretchr/testify/require"
+)
+
+func TestSystemUser(t *testing.T) {
+u, err := GetPossibleUserByID(db.DefaultContext, -1)
+require.NoError(t, err)
+assert.Equal(t, "Ghost", u.Name)
+assert.Equal(t, "ghost", u.LowerName)
+assert.True(t, u.IsGhost())
+assert.True(t, IsGhostUserName("gHost"))
+
+u, err = GetPossibleUserByID(db.DefaultContext, -2)
+require.NoError(t, err)
+assert.Equal(t, "gitea-actions", u.Name)
+assert.Equal(t, "gitea-actions", u.LowerName)
+assert.True(t, u.IsGiteaActions())
+assert.True(t, IsGiteaActionsUserName("Gitea-actionS"))
+
+_, err = GetPossibleUserByID(db.DefaultContext, -3)
+require.Error(t, err)
+}
@@ -25,6 +25,21 @@ import (
     "github.com/stretchr/testify/assert"
 )
 
+func TestIsUsableUsername(t *testing.T) {
+    assert.NoError(t, user_model.IsUsableUsername("a"))
+    assert.NoError(t, user_model.IsUsableUsername("foo.wiki"))
+    assert.NoError(t, user_model.IsUsableUsername("foo.git"))
+
+    assert.Error(t, user_model.IsUsableUsername("a--b"))
+    assert.Error(t, user_model.IsUsableUsername("-1_."))
+    assert.Error(t, user_model.IsUsableUsername(".profile"))
+    assert.Error(t, user_model.IsUsableUsername("-"))
+    assert.Error(t, user_model.IsUsableUsername("🌞"))
+    assert.Error(t, user_model.IsUsableUsername("the..repo"))
+    assert.Error(t, user_model.IsUsableUsername("foo.RSS"))
+    assert.Error(t, user_model.IsUsableUsername("foo.PnG"))
+}
+
 func TestOAuth2Application_LoadUser(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
     app := unittest.AssertExistsAndLoadBean(t, &auth.OAuth2Application{ID: 1})
@@ -318,14 +333,14 @@ func TestGetUserIDsByNames(t *testing.T) {
 func TestGetMaileableUsersByIDs(t *testing.T) {
     assert.NoError(t, unittest.PrepareTestDatabase())
 
-    results, err := user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, false)
+    results, err := user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, false)
     assert.NoError(t, err)
     assert.Len(t, results, 1)
     if len(results) > 1 {
         assert.Equal(t, 1, results[0].ID)
     }
 
-    results, err = user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
+    results, err = user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
     assert.NoError(t, err)
     assert.Len(t, results, 2)
     if len(results) > 2 {
@@ -167,186 +167,39 @@ func (w *Webhook) UpdateEvent() error {
 	return err
 }
 
-// HasCreateEvent returns true if hook enabled create event.
-func (w *Webhook) HasCreateEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Create)
-}
-
-// HasDeleteEvent returns true if hook enabled delete event.
-func (w *Webhook) HasDeleteEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Delete)
-}
-
-// HasForkEvent returns true if hook enabled fork event.
-func (w *Webhook) HasForkEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Fork)
-}
-
-// HasIssuesEvent returns true if hook enabled issues event.
-func (w *Webhook) HasIssuesEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Issues)
-}
-
-// HasIssuesAssignEvent returns true if hook enabled issues assign event.
-func (w *Webhook) HasIssuesAssignEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueAssign)
-}
-
-// HasIssuesLabelEvent returns true if hook enabled issues label event.
-func (w *Webhook) HasIssuesLabelEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueLabel)
-}
-
-// HasIssuesMilestoneEvent returns true if hook enabled issues milestone event.
-func (w *Webhook) HasIssuesMilestoneEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueMilestone)
-}
-
-// HasIssueCommentEvent returns true if hook enabled issue_comment event.
-func (w *Webhook) HasIssueCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.IssueComment)
-}
-
-// HasPushEvent returns true if hook enabled push event.
-func (w *Webhook) HasPushEvent() bool {
-	return w.PushOnly || w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Push)
-}
-
-// HasPullRequestEvent returns true if hook enabled pull request event.
-func (w *Webhook) HasPullRequestEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequest)
-}
-
-// HasPullRequestAssignEvent returns true if hook enabled pull request assign event.
-func (w *Webhook) HasPullRequestAssignEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestAssign)
-}
-
-// HasPullRequestLabelEvent returns true if hook enabled pull request label event.
-func (w *Webhook) HasPullRequestLabelEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestLabel)
-}
-
-// HasPullRequestMilestoneEvent returns true if hook enabled pull request milestone event.
-func (w *Webhook) HasPullRequestMilestoneEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestMilestone)
-}
-
-// HasPullRequestCommentEvent returns true if hook enabled pull_request_comment event.
-func (w *Webhook) HasPullRequestCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestComment)
-}
-
-// HasPullRequestApprovedEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestApprovedEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestRejectedEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestRejectedEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestReviewCommentEvent returns true if hook enabled pull request review event.
-func (w *Webhook) HasPullRequestReviewCommentEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReview)
-}
-
-// HasPullRequestSyncEvent returns true if hook enabled pull request sync event.
-func (w *Webhook) HasPullRequestSyncEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestSync)
-}
-
-// HasWikiEvent returns true if hook enabled wiki event.
-func (w *Webhook) HasWikiEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvent.Wiki)
-}
-
-// HasReleaseEvent returns if hook enabled release event.
-func (w *Webhook) HasReleaseEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Release)
-}
-
-// HasRepositoryEvent returns if hook enabled repository event.
-func (w *Webhook) HasRepositoryEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Repository)
-}
-
-// HasPackageEvent returns if hook enabled package event.
-func (w *Webhook) HasPackageEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.Package)
-}
-
-// HasPullRequestReviewRequestEvent returns true if hook enabled pull request review request event.
-func (w *Webhook) HasPullRequestReviewRequestEvent() bool {
-	return w.SendEverything ||
-		(w.ChooseEvents && w.HookEvents.PullRequestReviewRequest)
-}
-
-// EventCheckers returns event checkers
-func (w *Webhook) EventCheckers() []struct {
-	Has  func() bool
-	Type webhook_module.HookEventType
-} {
-	return []struct {
-		Has  func() bool
-		Type webhook_module.HookEventType
-	}{
-		{w.HasCreateEvent, webhook_module.HookEventCreate},
-		{w.HasDeleteEvent, webhook_module.HookEventDelete},
-		{w.HasForkEvent, webhook_module.HookEventFork},
-		{w.HasPushEvent, webhook_module.HookEventPush},
-		{w.HasIssuesEvent, webhook_module.HookEventIssues},
-		{w.HasIssuesAssignEvent, webhook_module.HookEventIssueAssign},
-		{w.HasIssuesLabelEvent, webhook_module.HookEventIssueLabel},
-		{w.HasIssuesMilestoneEvent, webhook_module.HookEventIssueMilestone},
-		{w.HasIssueCommentEvent, webhook_module.HookEventIssueComment},
-		{w.HasPullRequestEvent, webhook_module.HookEventPullRequest},
-		{w.HasPullRequestAssignEvent, webhook_module.HookEventPullRequestAssign},
-		{w.HasPullRequestLabelEvent, webhook_module.HookEventPullRequestLabel},
-		{w.HasPullRequestMilestoneEvent, webhook_module.HookEventPullRequestMilestone},
-		{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestComment},
-		{w.HasPullRequestApprovedEvent, webhook_module.HookEventPullRequestReviewApproved},
-		{w.HasPullRequestRejectedEvent, webhook_module.HookEventPullRequestReviewRejected},
-		{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestReviewComment},
-		{w.HasPullRequestSyncEvent, webhook_module.HookEventPullRequestSync},
-		{w.HasWikiEvent, webhook_module.HookEventWiki},
-		{w.HasRepositoryEvent, webhook_module.HookEventRepository},
-		{w.HasReleaseEvent, webhook_module.HookEventRelease},
-		{w.HasPackageEvent, webhook_module.HookEventPackage},
-		{w.HasPullRequestReviewRequestEvent, webhook_module.HookEventPullRequestReviewRequest},
-	}
-}
+func (w *Webhook) HasEvent(evt webhook_module.HookEventType) bool {
+	if w.SendEverything {
+		return true
+	}
+	if w.PushOnly {
+		return evt == webhook_module.HookEventPush
+	}
+	checkEvt := evt
+	switch evt {
+	case webhook_module.HookEventPullRequestReviewApproved, webhook_module.HookEventPullRequestReviewRejected, webhook_module.HookEventPullRequestReviewComment:
+		checkEvt = webhook_module.HookEventPullRequestReview
+	}
+	return w.HookEvents[checkEvt]
+}
 
 // EventsArray returns an array of hook events
 func (w *Webhook) EventsArray() []string {
-	events := make([]string, 0, 7)
-
-	for _, c := range w.EventCheckers() {
-		if c.Has() {
-			events = append(events, string(c.Type))
+	if w.SendEverything {
+		events := make([]string, 0, len(webhook_module.AllEvents()))
+		for _, evt := range webhook_module.AllEvents() {
+			events = append(events, string(evt))
+		}
+		return events
+	}
+
+	if w.PushOnly {
+		return []string{string(webhook_module.HookEventPush)}
+	}
+
+	events := make([]string, 0, len(w.HookEvents))
+	for event, enabled := range w.HookEvents {
+		if enabled {
+			events = append(events, string(event))
 		}
 	}
 	return events
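The snippet below is not part of the commit; it is a hedged sketch of a call site after the refactor, assuming the usual webhook_model / webhook_module import aliases: a single HasEvent lookup replaces the per-event HasXxxEvent helpers, and the review sub-events share the pull_request_review toggle. The helper name inspectWebhook is hypothetical.

package example

import (
	webhook_model "code.gitea.io/gitea/models/webhook"
	webhook_module "code.gitea.io/gitea/modules/webhook"
)

// inspectWebhook shows the new map-based event checks.
func inspectWebhook(w *webhook_model.Webhook) []string {
	if w.HasEvent(webhook_module.HookEventPush) {
		// push deliveries are enabled explicitly, via PushOnly, or via SendEverything
	}
	// an approved review is covered by the generic pull_request_review toggle
	_ = w.HasEvent(webhook_module.HookEventPullRequestReviewApproved)
	// EventsArray enumerates the enabled entries of the HookEvents map
	// (or every known event when SendEverything is set)
	return w.EventsArray()
}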
@@ -11,6 +11,19 @@ import (
     "code.gitea.io/gitea/modules/optional"
 )
 
+// GetSystemOrDefaultWebhooks returns webhooks by given argument or all if argument is missing.
+func GetSystemOrDefaultWebhooks(ctx context.Context, isSystemWebhook optional.Option[bool]) ([]*Webhook, error) {
+    webhooks := make([]*Webhook, 0, 5)
+    if !isSystemWebhook.Has() {
+        return webhooks, db.GetEngine(ctx).Where("repo_id=? AND owner_id=?", 0, 0).
+            Find(&webhooks)
+    }
+
+    return webhooks, db.GetEngine(ctx).
+        Where("repo_id=? AND owner_id=? AND is_system_webhook=?", 0, 0, isSystemWebhook.Value()).
+        Find(&webhooks)
+}
+
 // GetDefaultWebhooks returns all admin-default webhooks.
 func GetDefaultWebhooks(ctx context.Context) ([]*Webhook, error) {
     webhooks := make([]*Webhook, 0, 5)
models/webhook/webhook_system_test.go (Normal file, 37 lines)
@@ -0,0 +1,37 @@
+// Copyright 2017 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package webhook
+
+import (
+    "testing"
+
+    "code.gitea.io/gitea/models/db"
+    "code.gitea.io/gitea/models/unittest"
+    "code.gitea.io/gitea/modules/optional"
+
+    "github.com/stretchr/testify/assert"
+)
+
+func TestGetSystemOrDefaultWebhooks(t *testing.T) {
+    assert.NoError(t, unittest.PrepareTestDatabase())
+
+    hooks, err := GetSystemOrDefaultWebhooks(db.DefaultContext, optional.None[bool]())
+    assert.NoError(t, err)
+    if assert.Len(t, hooks, 2) {
+        assert.Equal(t, int64(5), hooks[0].ID)
+        assert.Equal(t, int64(6), hooks[1].ID)
+    }
+
+    hooks, err = GetSystemOrDefaultWebhooks(db.DefaultContext, optional.Some(true))
+    assert.NoError(t, err)
+    if assert.Len(t, hooks, 1) {
+        assert.Equal(t, int64(5), hooks[0].ID)
+    }
+
+    hooks, err = GetSystemOrDefaultWebhooks(db.DefaultContext, optional.Some(false))
+    assert.NoError(t, err)
+    if assert.Len(t, hooks, 1) {
+        assert.Equal(t, int64(6), hooks[0].ID)
+    }
+}
@@ -54,9 +54,9 @@ func TestWebhook_UpdateEvent(t *testing.T) {
         SendEverything: false,
         ChooseEvents:   false,
         HookEvents: webhook_module.HookEvents{
-            Create:      false,
-            Push:        true,
-            PullRequest: false,
+            webhook_module.HookEventCreate:      false,
+            webhook_module.HookEventPush:        true,
+            webhook_module.HookEventPullRequest: false,
         },
     }
     webhook.HookEvent = hookEvent
@@ -68,13 +68,13 @@ func TestWebhook_UpdateEvent(t *testing.T) {
 }
 
 func TestWebhook_EventsArray(t *testing.T) {
-    assert.Equal(t, []string{
+    assert.EqualValues(t, []string{
         "create", "delete", "fork", "push",
         "issues", "issue_assign", "issue_label", "issue_milestone", "issue_comment",
         "pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone",
         "pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected",
-        "pull_request_review_comment", "pull_request_sync", "wiki", "repository", "release",
-        "package", "pull_request_review_request",
+        "pull_request_review_comment", "pull_request_sync", "pull_request_review_request", "wiki", "repository", "release",
+        "package", "status",
     },
         (&Webhook{
             HookEvent: &webhook_module.HookEvent{SendEverything: true},
modules/actions/artifacts.go (Normal file, 48 lines)
@@ -0,0 +1,48 @@
+// Copyright 2025 The Gitea Authors. All rights reserved.
+// SPDX-License-Identifier: MIT
+
+package actions
+
+import (
+    "net/http"
+
+    actions_model "code.gitea.io/gitea/models/actions"
+    "code.gitea.io/gitea/modules/setting"
+    "code.gitea.io/gitea/modules/storage"
+    "code.gitea.io/gitea/services/context"
+)
+
+// Artifacts using the v4 backend are stored as a single combined zip file per artifact on the backend
+// The v4 backend ensures ContentEncoding is set to "application/zip", which is not the case for the old backend
+func IsArtifactV4(art *actions_model.ActionArtifact) bool {
+    return art.ArtifactName+".zip" == art.ArtifactPath && art.ContentEncoding == "application/zip"
+}
+
+func DownloadArtifactV4ServeDirectOnly(ctx *context.Base, art *actions_model.ActionArtifact) (bool, error) {
+    if setting.Actions.ArtifactStorage.ServeDirect() {
+        u, err := storage.ActionsArtifacts.URL(art.StoragePath, art.ArtifactPath, nil)
+        if u != nil && err == nil {
+            ctx.Redirect(u.String(), http.StatusFound)
+            return true, nil
+        }
+    }
+    return false, nil
+}
+
+func DownloadArtifactV4Fallback(ctx *context.Base, art *actions_model.ActionArtifact) error {
+    f, err := storage.ActionsArtifacts.Open(art.StoragePath)
+    if err != nil {
+        return err
+    }
+    defer f.Close()
+    http.ServeContent(ctx.Resp, ctx.Req, art.ArtifactName+".zip", art.CreatedUnix.AsLocalTime(), f)
+    return nil
+}
+
+func DownloadArtifactV4(ctx *context.Base, art *actions_model.ActionArtifact) error {
+    ok, err := DownloadArtifactV4ServeDirectOnly(ctx, art)
+    if ok || err != nil {
+        return err
+    }
+    return DownloadArtifactV4Fallback(ctx, art)
+}
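The handler sketch below is not part of the commit; it only illustrates how the new helpers compose, assuming a caller that already has a *context.Base and a loaded ActionArtifact. The function name serveArtifact and the error handling via http.Error on ctx.Resp are assumptions.

package example

import (
	"net/http"

	actions_model "code.gitea.io/gitea/models/actions"
	"code.gitea.io/gitea/modules/actions"
	"code.gitea.io/gitea/services/context"
)

// serveArtifact tries the direct-serve redirect first and otherwise streams
// the combined zip through the server.
func serveArtifact(ctx *context.Base, art *actions_model.ActionArtifact) {
	if !actions.IsArtifactV4(art) {
		// older artifacts are not stored as a single combined zip
		http.Error(ctx.Resp, "not a v4 artifact", http.StatusBadRequest)
		return
	}
	if err := actions.DownloadArtifactV4(ctx, art); err != nil {
		http.Error(ctx.Resp, "failed to serve artifact", http.StatusInternalServerError)
	}
}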
@@ -18,7 +18,6 @@ import (
     "time"
 
     "code.gitea.io/gitea/modules/git"
-    "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/setting"
     "code.gitea.io/gitea/modules/util"
 
@@ -64,10 +63,7 @@ func VerifyTimeLimitCode(now time.Time, data string, minutes int, code string) b
     // check code
     retCode := CreateTimeLimitCode(data, aliveTime, startTimeStr, nil)
     if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
-        retCode = CreateTimeLimitCode(data, aliveTime, startTimeStr, sha1.New()) // TODO: this is only for the support of legacy codes, remove this in/after 1.23
-        if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
-            return false
-        }
+        return false
     }
 
     // check time is expired or not: startTime <= now && now < startTime + minutes
@@ -144,13 +140,12 @@ func Int64sToStrings(ints []int64) []string {
     return strs
 }
 
-// EntryIcon returns the octicon class for displaying files/directories
+// EntryIcon returns the octicon name for displaying files/directories
 func EntryIcon(entry *git.TreeEntry) string {
     switch {
     case entry.IsLink():
         te, err := entry.FollowLink()
         if err != nil {
-            log.Debug(err.Error())
             return "file-symlink-file"
         }
         if te.IsDir() {
@@ -86,13 +86,10 @@ JWT_SECRET = %s
         verifyDataCode := func(c string) bool {
             return VerifyTimeLimitCode(now, "data", 2, c)
         }
-        code1 := CreateTimeLimitCode("data", 2, now, sha1.New())
-        code2 := CreateTimeLimitCode("data", 2, now, nil)
-        assert.True(t, verifyDataCode(code1))
-        assert.True(t, verifyDataCode(code2))
+        code := CreateTimeLimitCode("data", 2, now, nil)
+        assert.True(t, verifyDataCode(code))
         initGeneralSecret("000_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko")
-        assert.False(t, verifyDataCode(code1))
-        assert.False(t, verifyDataCode(code2))
+        assert.False(t, verifyDataCode(code))
     })
 }
 
@@ -137,5 +134,3 @@ func TestInt64sToStrings(t *testing.T) {
         Int64sToStrings([]int64{1, 4, 16, 64, 256}),
     )
 }
-
-// TODO: Test EntryIcon
modules/cache/cache.go (vendored, 9 lines)
@@ -37,10 +37,15 @@ func Init() error {
 }
 
 const (
     testCacheKey = "DefaultCache.TestKey"
-    SlowCacheThreshold = 100 * time.Microsecond
+    // SlowCacheThreshold marks cache tests as slow
+    // set to 30ms per discussion: https://github.com/go-gitea/gitea/issues/33190
+    // TODO: Replace with metrics histogram
+    SlowCacheThreshold = 30 * time.Millisecond
 )
 
+// Test performs delete, put and get operations on a predefined key
+// returns
 func Test() (time.Duration, error) {
     if defaultCache == nil {
         return 0, fmt.Errorf("default cache not initialized")
modules/cache/cache_test.go (vendored, 3 lines)
@@ -43,7 +43,8 @@ func TestTest(t *testing.T) {
     elapsed, err := Test()
     assert.NoError(t, err)
     // mem cache should take from 300ns up to 1ms on modern hardware ...
-    assert.Less(t, elapsed, time.Millisecond)
+    assert.Positive(t, elapsed)
+    assert.Less(t, elapsed, SlowCacheThreshold)
 }
 
 func TestGetCache(t *testing.T) {
@@ -18,6 +18,7 @@ import (
     "time"
 
     "code.gitea.io/gitea/modules/git/internal" //nolint:depguard // only this file can use the internal type CmdArg, other files and packages should use AddXxx functions
+    "code.gitea.io/gitea/modules/gtprof"
     "code.gitea.io/gitea/modules/log"
     "code.gitea.io/gitea/modules/process"
     "code.gitea.io/gitea/modules/util"
|
|||||||
} else if filepath.IsAbs(arg) {
|
} else if filepath.IsAbs(arg) {
|
||||||
base := filepath.Base(arg)
|
base := filepath.Base(arg)
|
||||||
dir := filepath.Dir(arg)
|
dir := filepath.Dir(arg)
|
||||||
return filepath.Join(filepath.Base(dir), base)
|
return ".../" + filepath.Join(filepath.Base(dir), base)
|
||||||
}
|
}
|
||||||
return arg
|
return arg
|
||||||
}
|
}
|
||||||
@@ -295,15 +296,20 @@ func (c *Command) run(skip int, opts *RunOpts) error {
         timeout = defaultCommandExecutionTimeout
     }
 
-    var desc string
+    cmdLogString := c.LogString()
     callerInfo := util.CallerFuncName(1 /* util */ + 1 /* this */ + skip /* parent */)
     if pos := strings.LastIndex(callerInfo, "/"); pos >= 0 {
         callerInfo = callerInfo[pos+1:]
     }
     // these logs are for debugging purposes only, so no guarantee of correctness or stability
-    desc = fmt.Sprintf("git.Run(by:%s, repo:%s): %s", callerInfo, logArgSanitize(opts.Dir), c.LogString())
+    desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", callerInfo, logArgSanitize(opts.Dir), cmdLogString)
     log.Debug("git.Command: %s", desc)
 
+    _, span := gtprof.GetTracer().Start(c.parentContext, gtprof.TraceSpanGitRun)
+    defer span.End()
+    span.SetAttributeString(gtprof.TraceAttrFuncCaller, callerInfo)
+    span.SetAttributeString(gtprof.TraceAttrGitCommand, cmdLogString)
+
     var ctx context.Context
     var cancel context.CancelFunc
     var finished context.CancelFunc
@@ -58,5 +58,5 @@ func TestCommandString(t *testing.T) {
     assert.EqualValues(t, cmd.prog+` a "-m msg" "it's a test" "say \"hello\""`, cmd.LogString())
 
     cmd = NewCommandContextNoGlobals(context.Background(), "url: https://a:b@c/", "/root/dir-a/dir-b")
-    assert.EqualValues(t, cmd.prog+` "url: https://sanitized-credential@c/" dir-a/dir-b`, cmd.LogString())
+    assert.EqualValues(t, cmd.prog+` "url: https://sanitized-credential@c/" .../dir-a/dir-b`, cmd.LogString())
 }
@@ -476,8 +476,12 @@ func (c *Commit) GetRepositoryDefaultPublicGPGKey(forceUpdate bool) (*GPGSettin
 }
 
 func IsStringLikelyCommitID(objFmt ObjectFormat, s string, minLength ...int) bool {
-    minLen := util.OptionalArg(minLength, objFmt.FullLength())
-    if len(s) < minLen || len(s) > objFmt.FullLength() {
+    maxLen := 64 // sha256
+    if objFmt != nil {
+        maxLen = objFmt.FullLength()
+    }
+    minLen := util.OptionalArg(minLength, maxLen)
+    if len(s) < minLen || len(s) > maxLen {
         return false
     }
     for _, c := range s {
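A small sketch of the new nil-ObjectFormat behaviour, not part of the commit: without a concrete object format the upper bound falls back to 64 characters (sha256), so a short hex string can still be recognised when only a minimum length is supplied, which is how the RefType change below uses it. The wrapper name looksLikeCommit is hypothetical.

package example

import "code.gitea.io/gitea/modules/git"

// looksLikeCommit accepts hex strings between 6 and 64 characters.
func looksLikeCommit(s string) bool {
	return git.IsStringLikelyCommitID(nil, s, 6) // e.g. true for "c0ffee"
}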
@@ -46,9 +46,9 @@ func (sf *CommitSubmoduleFile) SubmoduleWebLink(ctx context.Context, optCommitID
     if len(optCommitID) == 2 {
         commitLink = sf.repoLink + "/compare/" + optCommitID[0] + "..." + optCommitID[1]
     } else if len(optCommitID) == 1 {
-        commitLink = sf.repoLink + "/commit/" + optCommitID[0]
+        commitLink = sf.repoLink + "/tree/" + optCommitID[0]
     } else {
-        commitLink = sf.repoLink + "/commit/" + sf.refID
+        commitLink = sf.repoLink + "/tree/" + sf.refID
     }
     return &SubmoduleWebLink{RepoWebLink: sf.repoLink, CommitWebLink: commitLink}
 }
@@ -15,11 +15,11 @@ func TestCommitSubmoduleLink(t *testing.T) {
 
     wl := sf.SubmoduleWebLink(context.Background())
     assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
-    assert.Equal(t, "https://github.com/user/repo/commit/aaaa", wl.CommitWebLink)
+    assert.Equal(t, "https://github.com/user/repo/tree/aaaa", wl.CommitWebLink)
 
     wl = sf.SubmoduleWebLink(context.Background(), "1111")
     assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
-    assert.Equal(t, "https://github.com/user/repo/commit/1111", wl.CommitWebLink)
+    assert.Equal(t, "https://github.com/user/repo/tree/1111", wl.CommitWebLink)
 
     wl = sf.SubmoduleWebLink(context.Background(), "1111", "2222")
     assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
@@ -357,5 +357,5 @@ func Test_GetCommitBranchStart(t *testing.T) {
     startCommitID, err := repo.GetCommitBranchStart(os.Environ(), "branch1", commit.ID.String())
     assert.NoError(t, err)
     assert.NotEmpty(t, startCommitID)
-    assert.EqualValues(t, "9c9aef8dd84e02bc7ec12641deb4c930a7c30185", startCommitID)
+    assert.EqualValues(t, "95bb4d39648ee7e325106df01a621c530863a653", startCommitID)
 }
@@ -64,7 +64,10 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
     } else if commit.ParentCount() == 0 {
         cmd.AddArguments("show").AddDynamicArguments(endCommit).AddDashesAndList(files...)
     } else {
-        c, _ := commit.Parent(0)
+        c, err := commit.Parent(0)
+        if err != nil {
+            return err
+        }
         cmd.AddArguments("diff", "-M").AddDynamicArguments(c.ID.String(), endCommit).AddDashesAndList(files...)
     }
 case RawDiffPatch:
@@ -74,7 +77,10 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
     } else if commit.ParentCount() == 0 {
         cmd.AddArguments("format-patch", "--no-signature", "--stdout", "--root").AddDynamicArguments(endCommit).AddDashesAndList(files...)
     } else {
-        c, _ := commit.Parent(0)
+        c, err := commit.Parent(0)
+        if err != nil {
+            return err
+        }
         query := fmt.Sprintf("%s...%s", endCommit, c.ID.String())
         cmd.AddArguments("format-patch", "--no-signature", "--stdout").AddDynamicArguments(query).AddDashesAndList(files...)
     }
@@ -46,19 +46,9 @@ func parseLsTreeLine(line []byte) (*LsTreeEntry, error) {
         entry.Size = optional.Some(size)
     }
 
-    switch string(entryMode) {
-    case "100644":
-        entry.EntryMode = EntryModeBlob
-    case "100755":
-        entry.EntryMode = EntryModeExec
-    case "120000":
-        entry.EntryMode = EntryModeSymlink
-    case "160000":
-        entry.EntryMode = EntryModeCommit
-    case "040000", "040755": // git uses 040000 for tree object, but some users may get 040755 for unknown reasons
-        entry.EntryMode = EntryModeTree
-    default:
-        return nil, fmt.Errorf("unknown type: %v", string(entryMode))
+    entry.EntryMode, err = ParseEntryMode(string(entryMode))
+    if err != nil || entry.EntryMode == EntryModeNoEntry {
+        return nil, fmt.Errorf("invalid ls-tree output (invalid mode): %q, err: %w", line, err)
     }
 
     entry.ID, err = NewIDFromString(string(entryObjectID))
@@ -80,6 +80,10 @@ func RefNameFromTag(shortName string) RefName {
     return RefName(TagPrefix + shortName)
 }
 
+func RefNameFromCommit(shortName string) RefName {
+    return RefName(shortName)
+}
+
 func (ref RefName) String() string {
     return string(ref)
 }
@@ -181,32 +185,38 @@ func (ref RefName) RefGroup() string {
     return ""
 }
 
+// RefType is a simple ref type of the reference, it is used for UI and webhooks
+type RefType string
+
+const (
+    RefTypeBranch RefType = "branch"
+    RefTypeTag    RefType = "tag"
+    RefTypeCommit RefType = "commit"
+)
+
 // RefType returns the simple ref type of the reference, e.g. branch, tag
 // It's different from RefGroup, which is using the name of the directory under .git/refs
-// Here we using branch but not heads, using tag but not tags
-func (ref RefName) RefType() string {
-    var refType string
-    if ref.IsBranch() {
-        refType = "branch"
-    } else if ref.IsTag() {
-        refType = "tag"
+func (ref RefName) RefType() RefType {
+    switch {
+    case ref.IsBranch():
+        return RefTypeBranch
+    case ref.IsTag():
+        return RefTypeTag
+    case IsStringLikelyCommitID(nil, string(ref), 6):
+        return RefTypeCommit
     }
-    return refType
+    return ""
 }
 
-// RefURL returns the absolute URL for a ref in a repository
-func RefURL(repoURL, ref string) string {
-    refFullName := RefName(ref)
-    refName := util.PathEscapeSegments(refFullName.ShortName())
-    switch {
-    case refFullName.IsBranch():
-        return repoURL + "/src/branch/" + refName
-    case refFullName.IsTag():
-        return repoURL + "/src/tag/" + refName
-    case !Sha1ObjectFormat.IsValid(ref):
-        // assume they mean a branch
-        return repoURL + "/src/branch/" + refName
-    default:
-        return repoURL + "/src/commit/" + refName
-    }
+// RefWebLinkPath returns a path for the reference that can be used in a web link:
+// * "branch/<branch_name>"
+// * "tag/<tag_name>"
+// * "commit/<commit_id>"
+// It returns an empty string if the reference is not a branch, tag or commit.
+func (ref RefName) RefWebLinkPath() string {
+    refType := ref.RefType()
+    if refType == "" {
+        return ""
+    }
+    return string(refType) + "/" + util.PathEscapeSegments(ref.ShortName())
 }
@@ -20,6 +20,8 @@ func TestRefName(t *testing.T) {
 
     // Test pull names
     assert.Equal(t, "1", RefName("refs/pull/1/head").PullName())
+    assert.True(t, RefName("refs/pull/1/head").IsPull())
+    assert.True(t, RefName("refs/pull/1/merge").IsPull())
     assert.Equal(t, "my/pull", RefName("refs/pull/my/pull/head").PullName())
 
     // Test for branch names
@@ -30,9 +32,8 @@ func TestRefName(t *testing.T) {
     assert.Equal(t, "c0ffee", RefName("c0ffee").ShortName())
 }
 
-func TestRefURL(t *testing.T) {
-    repoURL := "/user/repo"
-    assert.Equal(t, repoURL+"/src/branch/foo", RefURL(repoURL, "refs/heads/foo"))
-    assert.Equal(t, repoURL+"/src/tag/foo", RefURL(repoURL, "refs/tags/foo"))
-    assert.Equal(t, repoURL+"/src/commit/c0ffee", RefURL(repoURL, "c0ffee"))
+func TestRefWebLinkPath(t *testing.T) {
+    assert.Equal(t, "branch/foo", RefName("refs/heads/foo").RefWebLinkPath())
+    assert.Equal(t, "tag/foo", RefName("refs/tags/foo").RefWebLinkPath())
+    assert.Equal(t, "commit/c0ffee", RefName("c0ffee").RefWebLinkPath())
 }
@@ -16,37 +16,35 @@ import (
 type ArchiveType int
 
 const (
-    // ZIP zip archive type
-    ZIP ArchiveType = iota + 1
-    // TARGZ tar gz archive type
-    TARGZ
-    // BUNDLE bundle archive type
-    BUNDLE
+    ArchiveUnknown ArchiveType = iota
+    ArchiveZip     // 1
+    ArchiveTarGz   // 2
+    ArchiveBundle  // 3
 )
 
-// String converts an ArchiveType to string
+// String converts an ArchiveType to string: the extension of the archive file without prefix dot
 func (a ArchiveType) String() string {
     switch a {
-    case ZIP:
+    case ArchiveZip:
         return "zip"
-    case TARGZ:
+    case ArchiveTarGz:
         return "tar.gz"
-    case BUNDLE:
+    case ArchiveBundle:
         return "bundle"
     }
     return "unknown"
 }
 
-func ToArchiveType(s string) ArchiveType {
-    switch s {
-    case "zip":
-        return ZIP
-    case "tar.gz":
-        return TARGZ
-    case "bundle":
-        return BUNDLE
+func SplitArchiveNameType(s string) (string, ArchiveType) {
+    switch {
+    case strings.HasSuffix(s, ".zip"):
+        return strings.TrimSuffix(s, ".zip"), ArchiveZip
+    case strings.HasSuffix(s, ".tar.gz"):
+        return strings.TrimSuffix(s, ".tar.gz"), ArchiveTarGz
+    case strings.HasSuffix(s, ".bundle"):
+        return strings.TrimSuffix(s, ".bundle"), ArchiveBundle
     }
-    return 0
+    return s, ArchiveUnknown
 }
 
 // CreateArchive create archive content to the target path
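The snippet below is not part of the commit; it is a small sketch of the renamed helper, assuming the modules/git package that already hosts ArchiveType: instead of mapping a bare extension to a type (the old ToArchiveType), SplitArchiveNameType takes the requested file name and returns the name part plus the archive type in one step. The wrapper parseArchiveRequest is hypothetical.

package example

import "code.gitea.io/gitea/modules/git"

// parseArchiveRequest splits "v1.0.zip" into "v1.0" and git.ArchiveZip;
// unrecognised suffixes come back as git.ArchiveUnknown.
func parseArchiveRequest(fileName string) (refName string, ok bool) {
	refName, archiveType := git.SplitArchiveNameType(fileName)
	return refName, archiveType != git.ArchiveUnknown
}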
Some files were not shown because too many files have changed in this diff.