Mirror of https://github.com/go-gitea/gitea.git (synced 2025-07-27 07:44:43 +02:00)

Commit 40f8de2e13: Merge branch 'main' into add-file-tree-to-file-view-page
@@ -22,20 +22,25 @@ groups:
name: FEATURES
labels:
- type/feature
-
name: API
labels:
- modifies/api
-
name: ENHANCEMENTS
labels:
- type/enhancement
- type/refactoring
- topic/ui
-
name: PERFORMANCE
labels:
- performance/memory
- performance/speed
- performance/bigrepo
- performance/cpu
-
name: BUGFIXES
labels:
- type/bug
-
name: API
labels:
- modifies/api
-
name: TESTING
labels:
@@ -403,7 +403,7 @@ module.exports = {
'github/a11y-svg-has-accessible-name': [0],
'github/array-foreach': [0],
'github/async-currenttarget': [2],
'github/async-preventdefault': [2],
'github/async-preventdefault': [0], // https://github.com/github/eslint-plugin-github/issues/599
'github/authenticity-token': [0],
'github/get-attribute': [0],
'github/js-class-name': [0],
.github/workflows/cron-licenses.yml (vendored, 4 changes)
@@ -1,8 +1,8 @@
name: cron-licenses

on:
schedule:
- cron: "7 0 * * 1" # every Monday at 00:07 UTC
#schedule:
#  - cron: "7 0 * * 1" # every Monday at 00:07 UTC
workflow_dispatch:

jobs:
Makefile (182 changes)
@@ -26,9 +26,9 @@ COMMA := ,
XGO_VERSION := go-1.23.x

AIR_PACKAGE ?= github.com/air-verse/air@v1
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3.0.3
EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/v3/cmd/editorconfig-checker@v3.1.2
GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.7.0
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.62.2
GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.63.4
GXZ_PACKAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.12
MISSPELL_PACKAGE ?= github.com/golangci/misspell/cmd/misspell@v0.6.0
SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.31.0
@@ -36,7 +36,7 @@ XGO_PACKAGE ?= src.techknowlogick.com/xgo@latest
GO_LICENSES_PACKAGE ?= github.com/google/go-licenses@v1
GOVULNCHECK_PACKAGE ?= golang.org/x/vuln/cmd/govulncheck@v1
ACTIONLINT_PACKAGE ?= github.com/rhysd/actionlint/cmd/actionlint@v1
GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.17.0
GOPLS_PACKAGE ?= golang.org/x/tools/gopls@v0.17.1

DOCKER_IMAGE ?= gitea/gitea
DOCKER_TAG ?= latest
@@ -189,67 +189,11 @@ TEST_MSSQL_PASSWORD ?= MwantsaSecurePassword1
all: build

.PHONY: help
help:
@echo "Make Routines:"
@echo " - \"\" equivalent to \"build\""
@echo " - build build everything"
@echo " - frontend build frontend files"
@echo " - backend build backend files"
@echo " - watch watch everything and continuously rebuild"
@echo " - watch-frontend watch frontend files and continuously rebuild"
@echo " - watch-backend watch backend files and continuously rebuild"
@echo " - clean delete backend and integration files"
@echo " - clean-all delete backend, frontend and integration files"
@echo " - deps install dependencies"
@echo " - deps-frontend install frontend dependencies"
@echo " - deps-backend install backend dependencies"
@echo " - deps-tools install tool dependencies"
@echo " - deps-py install python dependencies"
@echo " - lint lint everything"
@echo " - lint-fix lint everything and fix issues"
@echo " - lint-actions lint action workflow files"
@echo " - lint-frontend lint frontend files"
@echo " - lint-frontend-fix lint frontend files and fix issues"
@echo " - lint-backend lint backend files"
@echo " - lint-backend-fix lint backend files and fix issues"
@echo " - lint-go lint go files"
@echo " - lint-go-fix lint go files and fix issues"
@echo " - lint-go-vet lint go files with vet"
@echo " - lint-go-gopls lint go files with gopls"
@echo " - lint-js lint js files"
@echo " - lint-js-fix lint js files and fix issues"
@echo " - lint-css lint css files"
@echo " - lint-css-fix lint css files and fix issues"
@echo " - lint-md lint markdown files"
@echo " - lint-swagger lint swagger files"
@echo " - lint-templates lint template files"
@echo " - lint-yaml lint yaml files"
@echo " - lint-spell lint spelling"
@echo " - lint-spell-fix lint spelling and fix issues"
@echo " - checks run various consistency checks"
@echo " - checks-frontend check frontend files"
@echo " - checks-backend check backend files"
@echo " - test test everything"
@echo " - test-frontend test frontend files"
@echo " - test-backend test backend files"
@echo " - test-e2e[\#TestSpecificName] test end to end using playwright"
@echo " - update update js and py dependencies"
@echo " - update-js update js dependencies"
@echo " - update-py update py dependencies"
@echo " - webpack build webpack files"
@echo " - svg build svg files"
@echo " - fomantic build fomantic files"
@echo " - generate run \"go generate\""
@echo " - fmt format the Go code"
@echo " - generate-license update license files"
@echo " - generate-gitignore update gitignore files"
@echo " - generate-manpage generate manpage"
@echo " - generate-swagger generate the swagger spec from code comments"
@echo " - swagger-validate check if the swagger spec is valid"
@echo " - go-licenses regenerate go licenses"
@echo " - tidy run go mod tidy"
@echo " - test[\#TestSpecificName] run unit test"
@echo " - test-sqlite[\#TestSpecificName] run integration test for sqlite"
help: Makefile ## print Makefile help information.
@awk 'BEGIN {FS = ":.*##"; printf "\nUsage:\n make \033[36m[TARGETS] default target: build\033[0m\n\n\033[35mTargets:\033[0m\n"} /^[0-9A-Za-z._-]+:.*?##/ { printf " \033[36m%-45s\033[0m %s\n", $$1, $$2 }' Makefile #$(MAKEFILE_LIST)
@printf " \033[36m%-46s\033[0m %s\n" "test-e2e[#TestSpecificName]" "test end to end using playwright"
@printf " \033[36m%-46s\033[0m %s\n" "test[#TestSpecificName]" "run unit test"
@printf " \033[36m%-46s\033[0m %s\n" "test-sqlite[#TestSpecificName]" "run integration test for sqlite"

.PHONY: go-check
go-check:
@@ -280,11 +224,11 @@ node-check:
fi

.PHONY: clean-all
clean-all: clean
clean-all: clean ## delete backend, frontend and integration files
rm -rf $(WEBPACK_DEST_ENTRIES) node_modules

.PHONY: clean
clean:
clean: ## delete backend and integration files
rm -rf $(EXECUTABLE) $(DIST) $(BINDATA_DEST) $(BINDATA_HASH) \
integrations*.test \
e2e*.test \
@@ -296,7 +240,7 @@ clean:
tests/e2e/reports/ tests/e2e/test-artifacts/ tests/e2e/test-snapshots/

.PHONY: fmt
fmt:
fmt: ## format the Go code
@GOFUMPT_PACKAGE=$(GOFUMPT_PACKAGE) $(GO) run build/code-batch-process.go gitea-fmt -w '{file-list}'
$(eval TEMPLATES := $(shell find templates -type f -name '*.tmpl'))
@# strip whitespace after '{{' or '(' and before '}}' or ')' unless there is only
@@ -325,7 +269,7 @@ TAGS_PREREQ := $(TAGS_EVIDENCE)
endif

.PHONY: generate-swagger
generate-swagger: $(SWAGGER_SPEC)
generate-swagger: $(SWAGGER_SPEC) ## generate the swagger spec from code comments

$(SWAGGER_SPEC): $(GO_SOURCES_NO_BINDATA)
$(GO) run $(SWAGGER_PACKAGE) generate spec -x "$(SWAGGER_EXCLUDE)" -o './$(SWAGGER_SPEC)'
@@ -342,78 +286,78 @@ swagger-check: generate-swagger
fi

.PHONY: swagger-validate
swagger-validate:
swagger-validate: ## check if the swagger spec is valid
$(SED_INPLACE) '$(SWAGGER_SPEC_S_JSON)' './$(SWAGGER_SPEC)'
$(GO) run $(SWAGGER_PACKAGE) validate './$(SWAGGER_SPEC)'
$(SED_INPLACE) '$(SWAGGER_SPEC_S_TMPL)' './$(SWAGGER_SPEC)'

.PHONY: checks
checks: checks-frontend checks-backend
checks: checks-frontend checks-backend ## run various consistency checks

.PHONY: checks-frontend
checks-frontend: lockfile-check svg-check
checks-frontend: lockfile-check svg-check ## check frontend files

.PHONY: checks-backend
checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check
checks-backend: tidy-check swagger-check fmt-check swagger-validate security-check ## check backend files

.PHONY: lint
lint: lint-frontend lint-backend lint-spell
lint: lint-frontend lint-backend lint-spell ## lint everything

.PHONY: lint-fix
lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix
lint-fix: lint-frontend-fix lint-backend-fix lint-spell-fix ## lint everything and fix issues

.PHONY: lint-frontend
lint-frontend: lint-js lint-css
lint-frontend: lint-js lint-css ## lint frontend files

.PHONY: lint-frontend-fix
lint-frontend-fix: lint-js-fix lint-css-fix
lint-frontend-fix: lint-js-fix lint-css-fix ## lint frontend files and fix issues

.PHONY: lint-backend
lint-backend: lint-go lint-go-vet lint-go-gopls lint-editorconfig
lint-backend: lint-go lint-go-vet lint-go-gopls lint-editorconfig ## lint backend files

.PHONY: lint-backend-fix
lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig
lint-backend-fix: lint-go-fix lint-go-vet lint-editorconfig ## lint backend files and fix issues

.PHONY: lint-js
lint-js: node_modules
lint-js: node_modules ## lint js files
npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES)
npx vue-tsc

.PHONY: lint-js-fix
lint-js-fix: node_modules
lint-js-fix: node_modules ## lint js files and fix issues
npx eslint --color --max-warnings=0 --ext js,ts,vue $(ESLINT_FILES) --fix
npx vue-tsc

.PHONY: lint-css
lint-css: node_modules
lint-css: node_modules ## lint css files
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES)

.PHONY: lint-css-fix
lint-css-fix: node_modules
lint-css-fix: node_modules ## lint css files and fix issues
npx stylelint --color --max-warnings=0 $(STYLELINT_FILES) --fix

.PHONY: lint-swagger
lint-swagger: node_modules
lint-swagger: node_modules ## lint swagger files
npx spectral lint -q -F hint $(SWAGGER_SPEC)

.PHONY: lint-md
lint-md: node_modules
lint-md: node_modules ## lint markdown files
npx markdownlint *.md

.PHONY: lint-spell
lint-spell:
lint-spell: ## lint spelling
@go run $(MISSPELL_PACKAGE) -dict tools/misspellings.csv -error $(SPELLCHECK_FILES)

.PHONY: lint-spell-fix
lint-spell-fix:
lint-spell-fix: ## lint spelling and fix issues
@go run $(MISSPELL_PACKAGE) -dict tools/misspellings.csv -w $(SPELLCHECK_FILES)

.PHONY: lint-go
lint-go:
lint-go: ## lint go files
$(GO) run $(GOLANGCI_LINT_PACKAGE) run

.PHONY: lint-go-fix
lint-go-fix:
lint-go-fix: ## lint go files and fix issues
$(GO) run $(GOLANGCI_LINT_PACKAGE) run --fix

# workaround step for the lint-go-windows CI task because 'go run' can not
@@ -424,13 +368,13 @@ lint-go-windows:
golangci-lint run

.PHONY: lint-go-vet
lint-go-vet:
lint-go-vet: ## lint go files with vet
@echo "Running go vet..."
@GOOS= GOARCH= $(GO) build code.gitea.io/gitea-vet
@$(GO) vet -vettool=gitea-vet ./...

.PHONY: lint-go-gopls
lint-go-gopls:
lint-go-gopls: ## lint go files with gopls
@echo "Running gopls check..."
@GO=$(GO) GOPLS_PACKAGE=$(GOPLS_PACKAGE) tools/lint-go-gopls.sh $(GO_SOURCES_NO_BINDATA)

@@ -439,41 +383,41 @@ lint-editorconfig:
@$(GO) run $(EDITORCONFIG_CHECKER_PACKAGE) $(EDITORCONFIG_FILES)

.PHONY: lint-actions
lint-actions:
lint-actions: ## lint action workflow files
$(GO) run $(ACTIONLINT_PACKAGE)

.PHONY: lint-templates
lint-templates: .venv node_modules
lint-templates: .venv node_modules ## lint template files
@node tools/lint-templates-svg.js
@poetry run djlint $(shell find templates -type f -iname '*.tmpl')

.PHONY: lint-yaml
lint-yaml: .venv
lint-yaml: .venv ## lint yaml files
@poetry run yamllint .

.PHONY: watch
watch:
watch: ## watch everything and continuously rebuild
@bash tools/watch.sh

.PHONY: watch-frontend
watch-frontend: node-check node_modules
watch-frontend: node-check node_modules ## watch frontend files and continuously rebuild
@rm -rf $(WEBPACK_DEST_ENTRIES)
NODE_ENV=development npx webpack --watch --progress

.PHONY: watch-backend
watch-backend: go-check
watch-backend: go-check ## watch backend files and continuously rebuild
GITEA_RUN_MODE=dev $(GO) run $(AIR_PACKAGE) -c .air.toml

.PHONY: test
test: test-frontend test-backend
test: test-frontend test-backend ## test everything

.PHONY: test-backend
test-backend:
test-backend: ## test frontend files
@echo "Running go test with $(GOTESTFLAGS) -tags '$(TEST_TAGS)'..."
@$(GO) test $(GOTESTFLAGS) -tags='$(TEST_TAGS)' $(GO_TEST_PACKAGES)

.PHONY: test-frontend
test-frontend: node_modules
test-frontend: node_modules ## test backend files
npx vitest

.PHONY: test-check
@@ -505,7 +449,7 @@ unit-test-coverage:
@$(GO) test $(GOTESTFLAGS) -timeout=20m -tags='$(TEST_TAGS)' -cover -coverprofile coverage.out $(GO_TEST_PACKAGES) && echo "\n==>\033[32m Ok\033[m\n" || exit 1

.PHONY: tidy
tidy:
tidy: ## run go mod tidy
$(eval MIN_GO_VERSION := $(shell grep -Eo '^go\s+[0-9]+\.[0-9.]+' go.mod | cut -d' ' -f2))
$(GO) mod tidy -compat=$(MIN_GO_VERSION)
@$(MAKE) --no-print-directory $(GO_LICENSE_FILE)
@@ -524,7 +468,7 @@ tidy-check: tidy
fi

.PHONY: go-licenses
go-licenses: $(GO_LICENSE_FILE)
go-licenses: $(GO_LICENSE_FILE) ## regenerate go licenses

$(GO_LICENSE_FILE): go.mod go.sum
-$(GO) run $(GO_LICENSES_PACKAGE) save . --force --save_path=$(GO_LICENSE_TMP_DIR) 2>/dev/null
@@ -771,17 +715,17 @@ install: $(wildcard *.go)
CGO_CFLAGS="$(CGO_CFLAGS)" $(GO) install -v -tags '$(TAGS)' -ldflags '-s -w $(LDFLAGS)'

.PHONY: build
build: frontend backend
build: frontend backend ## build everything

.PHONY: frontend
frontend: $(WEBPACK_DEST)
frontend: $(WEBPACK_DEST) ## build frontend files

.PHONY: backend
backend: go-check generate-backend $(EXECUTABLE)
backend: go-check generate-backend $(EXECUTABLE) ## build backend files

# We generate the backend before the frontend in case we in future we want to generate things in the frontend from generated files in backend
.PHONY: generate
generate: generate-backend
generate: generate-backend ## run "go generate"

.PHONY: generate-backend
generate-backend: $(TAGS_PREREQ) generate-go
@@ -846,20 +790,20 @@ release-sources: | $(DIST_DIRS)
rm -f $(STORED_VERSION_FILE)

.PHONY: deps
deps: deps-frontend deps-backend deps-tools deps-py
deps: deps-frontend deps-backend deps-tools deps-py ## install dependencies

.PHONY: deps-py
deps-py: .venv
deps-py: .venv ## install python dependencies

.PHONY: deps-frontend
deps-frontend: node_modules
deps-frontend: node_modules ## install frontend dependencies

.PHONY: deps-backend
deps-backend:
deps-backend: ## install backend dependencies
$(GO) mod download

.PHONY: deps-tools
deps-tools:
deps-tools: ## install tool dependencies
$(GO) install $(AIR_PACKAGE) & \
$(GO) install $(EDITORCONFIG_CHECKER_PACKAGE) & \
$(GO) install $(GOFUMPT_PACKAGE) & \
@@ -883,10 +827,10 @@ node_modules: package-lock.json
@touch .venv

.PHONY: update
update: update-js update-py
update: update-js update-py ## update js and py dependencies

.PHONY: update-js
update-js: node-check | node_modules
update-js: node-check | node_modules ## update js dependencies
npx updates -u -f package.json
rm -rf node_modules package-lock.json
npm install --package-lock
@@ -895,14 +839,14 @@ update-js: node-check | node_modules
@touch node_modules

.PHONY: update-py
update-py: node-check | node_modules
update-py: node-check | node_modules ## update py dependencies
npx updates -u -f pyproject.toml
rm -rf .venv poetry.lock
poetry install
@touch .venv

.PHONY: fomantic
fomantic:
fomantic: ## build fomantic files
rm -rf $(FOMANTIC_WORK_DIR)/build
cd $(FOMANTIC_WORK_DIR) && npm install --no-save
cp -f $(FOMANTIC_WORK_DIR)/theme.config.less $(FOMANTIC_WORK_DIR)/node_modules/fomantic-ui/src/theme.config
@@ -915,7 +859,7 @@ fomantic:
rm -f $(FOMANTIC_WORK_DIR)/build/*.min.*

.PHONY: webpack
webpack: $(WEBPACK_DEST)
webpack: $(WEBPACK_DEST) ## build webpack files

$(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json
@$(MAKE) -s node-check node_modules
@@ -925,7 +869,7 @@ $(WEBPACK_DEST): $(WEBPACK_SOURCES) $(WEBPACK_CONFIGS) package-lock.json
@touch $(WEBPACK_DEST)

.PHONY: svg
svg: node-check | node_modules
svg: node-check | node_modules ## build svg files
rm -rf $(SVG_DEST_DIR)
node tools/generate-svg.js

@@ -961,11 +905,11 @@ update-translations:
rmdir ./translations

.PHONY: generate-license
generate-license:
generate-license: ## update license files
$(GO) run build/generate-licenses.go

.PHONY: generate-gitignore
generate-gitignore:
generate-gitignore: ## update gitignore files
$(GO) run build/generate-gitignores.go

.PHONY: generate-images
@@ -974,7 +918,7 @@ generate-images: | node_modules
node tools/generate-images.js $(TAGS)

.PHONY: generate-manpage
generate-manpage:
generate-manpage: ## generate manpage
@[ -f gitea ] || make backend
@mkdir -p man/man1/ man/man5
@./gitea docs --man > man/man1/gitea.1
assets/go-licenses.json (generated, 5 changes)
@@ -744,11 +744,6 @@
"path": "github.com/kevinburke/ssh_config/LICENSE",
"licenseText": "Copyright (c) 2017 Kevin Burke.\n\nPermission is hereby granted, free of charge, to any person\nobtaining a copy of this software and associated documentation\nfiles (the \"Software\"), to deal in the Software without\nrestriction, including without limitation the rights to use,\ncopy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the\nSoftware is furnished to do so, subject to the following\nconditions:\n\nThe above copyright notice and this permission notice shall be\nincluded in all copies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND,\nEXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES\nOF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND\nNONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT\nHOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,\nWHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING\nFROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR\nOTHER DEALINGS IN THE SOFTWARE.\n\n===================\n\nThe lexer and parser borrow heavily from github.com/pelletier/go-toml. The\nlicense for that project is copied below.\n\nThe MIT License (MIT)\n\nCopyright (c) 2013 - 2017 Thomas Pelletier, Eric Anderton\n\nPermission is hereby granted, free of charge, to any person obtaining a copy\nof this software and associated documentation files (the \"Software\"), to deal\nin the Software without restriction, including without limitation the rights\nto use, copy, modify, merge, publish, distribute, sublicense, and/or sell\ncopies of the Software, and to permit persons to whom the Software is\nfurnished to do so, subject to the following conditions:\n\nThe above copyright notice and this permission notice shall be included in all\ncopies or substantial portions of the Software.\n\nTHE SOFTWARE IS PROVIDED \"AS IS\", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR\nIMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,\nFITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE\nAUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER\nLIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,\nOUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE\nSOFTWARE.\n"
},
{
"name": "github.com/keybase/go-crypto",
"path": "github.com/keybase/go-crypto/LICENSE",
"licenseText": "Copyright (c) 2009 The Go Authors. All rights reserved.\n\nRedistribution and use in source and binary forms, with or without\nmodification, are permitted provided that the following conditions are\nmet:\n\n * Redistributions of source code must retain the above copyright\nnotice, this list of conditions and the following disclaimer.\n * Redistributions in binary form must reproduce the above\ncopyright notice, this list of conditions and the following disclaimer\nin the documentation and/or other materials provided with the\ndistribution.\n * Neither the name of Google Inc. nor the names of its\ncontributors may be used to endorse or promote products derived from\nthis software without specific prior written permission.\n\nTHIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS\n\"AS IS\" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT\nLIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR\nA PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT\nOWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,\nSPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT\nLIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,\nDATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY\nTHEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT\n(INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE\nOF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.\n"
},
{
"name": "github.com/klauspost/compress",
"path": "github.com/klauspost/compress/LICENSE",
@@ -104,7 +104,10 @@ func fail(ctx context.Context, userMessage, logMsgFmt string, args ...any) error
// There appears to be a chance to cause a zombie process and failure to read the Exit status
// if nothing is outputted on stdout.
_, _ = fmt.Fprintln(os.Stdout, "")
_, _ = fmt.Fprintln(os.Stderr, "Gitea:", userMessage)
// add extra empty lines to separate our message from other git errors to get more attention
_, _ = fmt.Fprintln(os.Stderr, "error:")
_, _ = fmt.Fprintln(os.Stderr, "error:", userMessage)
_, _ = fmt.Fprintln(os.Stderr, "error:")

if logMsgFmt != "" {
logMsg := fmt.Sprintf(logMsgFmt, args...)
@@ -18,10 +18,12 @@ import (

"code.gitea.io/gitea/modules/container"
"code.gitea.io/gitea/modules/graceful"
"code.gitea.io/gitea/modules/gtprof"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/process"
"code.gitea.io/gitea/modules/public"
"code.gitea.io/gitea/modules/setting"
"code.gitea.io/gitea/modules/util"
"code.gitea.io/gitea/routers"
"code.gitea.io/gitea/routers/install"

@@ -218,6 +220,8 @@ func serveInstalled(ctx *cli.Context) error {
}
}

gtprof.EnableBuiltinTracer(util.Iif(setting.IsProd, 2000*time.Millisecond, 100*time.Millisecond))

// Set up Chi routes
webRoutes := routers.NormalRoutes()
err := listen(webRoutes, true)
@@ -790,10 +790,13 @@ LEVEL = Info
;; Please note that setting this to false will not disable OAuth Basic or Basic authentication using a token
;ENABLE_BASIC_AUTHENTICATION = true
;;
;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 login methods.
;; Show the password sign-in form (for password-based login), otherwise, only show OAuth2 or passkey login methods if they are enabled.
;; If you set it to false, maybe it also needs to set ENABLE_BASIC_AUTHENTICATION to false to completely disable password-based authentication.
;ENABLE_PASSWORD_SIGNIN_FORM = true
;;
;; Allow users to sign-in with a passkey
;ENABLE_PASSKEY_AUTHENTICATION = true
;;
;; More detail: https://github.com/gogits/gogs/issues/165
;ENABLE_REVERSE_PROXY_AUTHENTICATION = false
; Enable this to allow reverse proxy authentication for API requests, the reverse proxy is responsible for ensuring that no CSRF is possible.
@@ -1126,6 +1129,9 @@ LEVEL = Info
;; In default merge messages only include approvers who are official
;DEFAULT_MERGE_MESSAGE_OFFICIAL_APPROVERS_ONLY = true
;;
;; In default squash-merge messages include the commit message of all commits comprising the pull request.
;POPULATE_SQUASH_COMMENT_WITH_COMMIT_MESSAGES = false
;;
;; Add co-authored-by and co-committed-by trailers if committer does not match author
;ADD_CO_COMMITTER_TRAILERS = true
;;
flake.lock (generated, 12 changes)
@@ -5,11 +5,11 @@
"systems": "systems"
},
"locked": {
"lastModified": 1726560853,
"narHash": "sha256-X6rJYSESBVr3hBoH0WbKE5KvhPU5bloyZ2L4K60/fPQ=",
"lastModified": 1731533236,
"narHash": "sha256-l0KFg5HjrsfsO/JpG+r7fRrqm12kzFHyUHqHCVpMMbI=",
"owner": "numtide",
"repo": "flake-utils",
"rev": "c1dfcf08411b08f6b8615f7d8971a2bfa81d5e8a",
"rev": "11707dc2f618dd54ca8739b309ec4fc024de578b",
"type": "github"
},
"original": {
@@ -20,11 +20,11 @@
},
"nixpkgs": {
"locked": {
"lastModified": 1731139594,
"narHash": "sha256-IigrKK3vYRpUu+HEjPL/phrfh7Ox881er1UEsZvw9Q4=",
"lastModified": 1736798957,
"narHash": "sha256-qwpCtZhSsSNQtK4xYGzMiyEDhkNzOCz/Vfu4oL2ETsQ=",
"owner": "nixos",
"repo": "nixpkgs",
"rev": "76612b17c0ce71689921ca12d9ffdc9c23ce40b2",
"rev": "9abb87b552b7f55ac8916b6fc9e5cb486656a2f3",
"type": "github"
},
"original": {
@@ -29,9 +29,14 @@
poetry

# backend
go_1_23
gofumpt
sqlite
];
shellHook = ''
export GO="${pkgs.go_1_23}/bin/go"
export GOROOT="${pkgs.go_1_23}/share/go"
'';
};
}
);
go.mod (1 change)
@@ -78,7 +78,6 @@ require (
github.com/jhillyerd/enmime v1.3.0
github.com/json-iterator/go v1.1.12
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51
github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4
github.com/klauspost/compress v1.17.11
github.com/klauspost/cpuid/v2 v2.2.8
github.com/lib/pq v1.10.9
go.sum (2 changes)
@@ -506,8 +506,6 @@ github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51 h1:Z9n2FFNU
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
github.com/kevinburke/ssh_config v1.2.0 h1:x584FjTGwHzMwvHx18PXxbBVzfnxogHaAReU4gf13a4=
github.com/kevinburke/ssh_config v1.2.0/go.mod h1:CT57kijsi8u/K/BOFA39wgDQJ9CxiF4nAY/ojJ6r6mM=
github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4 h1:cTxwSmnaqLoo+4tLukHoB9iqHOu3LmLhRmgUxZo6Vp4=
github.com/keybase/go-crypto v0.0.0-20200123153347-de78d2cb44f4/go.mod h1:ghbZscTyKdM07+Fw3KSi0hcJm+AlEUWj8QLlPtijN/M=
github.com/klauspost/compress v1.4.1/go.mod h1:RyIbtBH6LamlWaDj8nUwkbUhJ87Yi3uG0guNDohfE1A=
github.com/klauspost/compress v1.11.4/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
github.com/klauspost/compress v1.17.11 h1:In6xLpyWOi1+C7tXUUWv2ot1QvBjxevKAaI6IXrJmUc=
@@ -72,9 +72,9 @@ func (at ActionType) String() string {
case ActionRenameRepo:
return "rename_repo"
case ActionStarRepo:
return "star_repo"
return "star_repo" // will not displayed in feeds.tmpl
case ActionWatchRepo:
return "watch_repo"
return "watch_repo" // will not displayed in feeds.tmpl
case ActionCommitRepo:
return "commit_repo"
case ActionCreateIssue:
@@ -13,8 +13,8 @@ import (
user_model "code.gitea.io/gitea/models/user"
"code.gitea.io/gitea/modules/timeutil"

"github.com/keybase/go-crypto/openpgp"
"github.com/keybase/go-crypto/openpgp/packet"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/packet"
"xorm.io/builder"
)

@@ -141,7 +141,11 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified
// Parse Subkeys
subkeys := make([]*GPGKey, len(e.Subkeys))
for i, k := range e.Subkeys {
subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, expiry)
subkeyExpiry := expiry
if k.Sig.KeyLifetimeSecs != nil {
subkeyExpiry = k.PublicKey.CreationTime.Add(time.Duration(*k.Sig.KeyLifetimeSecs) * time.Second)
}
subs, err := parseSubGPGKey(ownerID, pubkey.KeyIdString(), k.PublicKey, subkeyExpiry)
if err != nil {
return nil, ErrGPGKeyParsing{ParseError: err}
}
@@ -156,7 +160,7 @@ func parseGPGKey(ctx context.Context, ownerID int64, e *openpgp.Entity, verified

emails := make([]*user_model.EmailAddress, 0, len(e.Identities))
for _, ident := range e.Identities {
if ident.Revocation != nil {
if ident.Revoked(time.Now()) {
continue
}
email := strings.ToLower(strings.TrimSpace(ident.UserId.Email))
@@ -10,7 +10,7 @@ import (
"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/modules/log"

"github.com/keybase/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp"
)

// __________________ ________ ____ __.
@@ -83,12 +83,12 @@ func AddGPGKey(ctx context.Context, ownerID int64, content, token, signature str
verified := false
// Handle provided signature
if signature != "" {
signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature))
signer, err := openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token), strings.NewReader(signature), nil)
if err != nil {
signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature))
signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\n"), strings.NewReader(signature), nil)
}
if err != nil {
signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature))
signer, err = openpgp.CheckArmoredDetachedSignature(ekeys, strings.NewReader(token+"\r\n"), strings.NewReader(signature), nil)
}
if err != nil {
log.Error("Unable to validate token signature. Error: %v", err)
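Note on the hunk above: the commit replaces github.com/keybase/go-crypto with github.com/ProtonMail/go-crypto, whose CheckArmoredDetachedSignature takes an extra *packet.Config argument (nil keeps the defaults). A minimal, hedged sketch of that call outside Gitea's code, with hypothetical names:

// Sketch only, not Gitea code: verify an armored detached signature with
// the ProtonMail fork of the openpgp package.
package example

import (
	"fmt"
	"strings"

	"github.com/ProtonMail/go-crypto/openpgp"
)

func verifyDetached(armoredKeyring, signedText, armoredSig string) error {
	keyring, err := openpgp.ReadArmoredKeyRing(strings.NewReader(armoredKeyring))
	if err != nil {
		return fmt.Errorf("read keyring: %w", err)
	}
	// The trailing nil is the new *packet.Config parameter added by the fork.
	signer, err := openpgp.CheckArmoredDetachedSignature(
		keyring, strings.NewReader(signedText), strings.NewReader(armoredSig), nil)
	if err != nil {
		return fmt.Errorf("verify: %w", err)
	}
	fmt.Println("signed by key", signer.PrimaryKey.KeyIdString())
	return nil
}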
@@ -16,7 +16,7 @@ import (
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"

"github.com/keybase/go-crypto/openpgp/packet"
"github.com/ProtonMail/go-crypto/openpgp/packet"
)

// __________________ ________ ____ __.
@@ -13,9 +13,9 @@ import (
"strings"
"time"

"github.com/keybase/go-crypto/openpgp"
"github.com/keybase/go-crypto/openpgp/armor"
"github.com/keybase/go-crypto/openpgp/packet"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/armor"
"github.com/ProtonMail/go-crypto/openpgp/packet"
)

// __________________ ________ ____ __.
@@ -80,7 +80,7 @@ func base64DecPubKey(content string) (*packet.PublicKey, error) {
return pkey, nil
}

// getExpiryTime extract the expire time of primary key based on sig
// getExpiryTime extract the expiry time of primary key based on sig
func getExpiryTime(e *openpgp.Entity) time.Time {
expiry := time.Time{}
// Extract self-sign for expire date based on : https://github.com/golang/crypto/blob/master/openpgp/keys.go#L165
@@ -88,12 +88,12 @@ func getExpiryTime(e *openpgp.Entity) time.Time {
for _, ident := range e.Identities {
if selfSig == nil {
selfSig = ident.SelfSignature
} else if ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
} else if ident.SelfSignature != nil && ident.SelfSignature.IsPrimaryId != nil && *ident.SelfSignature.IsPrimaryId {
selfSig = ident.SelfSignature
break
}
}
if selfSig.KeyLifetimeSecs != nil {
if selfSig != nil && selfSig.KeyLifetimeSecs != nil {
expiry = e.PrimaryKey.CreationTime.Add(time.Duration(*selfSig.KeyLifetimeSecs) * time.Second)
}
return expiry
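The hunks above harden expiry computation against nil self-signatures. An illustrative, hedged sketch of the underlying rule (expiry = creation time + KeyLifetimeSecs, or "never" when the lifetime is absent), with hypothetical names:

// Illustration only: derive an expiry time from an OpenPGP signature's
// KeyLifetimeSecs, guarding nil values as the patched code does.
package example

import (
	"time"

	"github.com/ProtonMail/go-crypto/openpgp/packet"
)

// expiryFromSig returns the zero time when the signature is missing or has
// no lifetime, i.e. the key never expires.
func expiryFromSig(created time.Time, sig *packet.Signature) time.Time {
	if sig == nil || sig.KeyLifetimeSecs == nil {
		return time.Time{}
	}
	return created.Add(time.Duration(*sig.KeyLifetimeSecs) * time.Second)
}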
@@ -13,7 +13,8 @@ import (
"code.gitea.io/gitea/modules/timeutil"
"code.gitea.io/gitea/modules/util"

"github.com/keybase/go-crypto/openpgp/packet"
"github.com/ProtonMail/go-crypto/openpgp"
"github.com/ProtonMail/go-crypto/openpgp/packet"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
)
@@ -403,3 +404,25 @@ func TestTryGetKeyIDFromSignature(t *testing.T) {
IssuerFingerprint: []uint8{0xb, 0x23, 0x24, 0xc7, 0xe6, 0xfe, 0x4f, 0x3a, 0x6, 0x26, 0xc1, 0x21, 0x3, 0x8d, 0x1a, 0x3e, 0xad, 0xdb, 0xea, 0x9c},
}))
}

func TestParseGPGKey(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
assert.NoError(t, db.Insert(db.DefaultContext, &user_model.EmailAddress{UID: 1, Email: "email1@example.com", IsActivated: true}))

// create a key for test email
e, err := openpgp.NewEntity("name", "comment", "email1@example.com", nil)
require.NoError(t, err)
k, err := parseGPGKey(db.DefaultContext, 1, e, true)
require.NoError(t, err)
assert.NotEmpty(t, k.KeyID)
assert.NotEmpty(t, k.Emails) // the key is valid, matches the email

// then revoke the key
for _, id := range e.Identities {
id.Revocations = append(id.Revocations, &packet.Signature{RevocationReason: util.ToPointer(packet.KeyCompromised)})
}
k, err = parseGPGKey(db.DefaultContext, 1, e, true)
require.NoError(t, err)
assert.NotEmpty(t, k.KeyID)
assert.Empty(t, k.Emails) // the key is revoked, matches no email
}
@@ -7,23 +7,36 @@ import (
"context"
"time"

"code.gitea.io/gitea/modules/gtprof"
"code.gitea.io/gitea/modules/log"
"code.gitea.io/gitea/modules/setting"

"xorm.io/xorm/contexts"
)

type SlowQueryHook struct {
type EngineHook struct {
Threshold time.Duration
Logger log.Logger
}

var _ contexts.Hook = (*SlowQueryHook)(nil)
var _ contexts.Hook = (*EngineHook)(nil)

func (*SlowQueryHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
return c.Ctx, nil
func (*EngineHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
ctx, _ := gtprof.GetTracer().Start(c.Ctx, gtprof.TraceSpanDatabase)
return ctx, nil
}

func (h *SlowQueryHook) AfterProcess(c *contexts.ContextHook) error {
func (h *EngineHook) AfterProcess(c *contexts.ContextHook) error {
span := gtprof.GetContextSpan(c.Ctx)
if span != nil {
// Do not record SQL parameters here:
// * It shouldn't expose the parameters because they contain sensitive information, end users need to report the trace details safely.
// * Some parameters contain quite long texts, waste memory and are difficult to display.
span.SetAttributeString(gtprof.TraceAttrDbSQL, c.SQL)
span.End()
} else {
setting.PanicInDevOrTesting("span in database engine hook is nil")
}
if c.ExecuteTime >= h.Threshold {
// 8 is the amount of skips passed to runtime.Caller, so that in the log the correct function
// is being displayed (the function that ultimately wants to execute the query in the code)
@@ -72,7 +72,7 @@ func InitEngine(ctx context.Context) error {
xe.SetDefaultContext(ctx)

if setting.Database.SlowQueryThreshold > 0 {
xe.AddHook(&SlowQueryHook{
xe.AddHook(&EngineHook{
Threshold: setting.Database.SlowQueryThreshold,
Logger: log.GetLogger("xorm"),
})
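Context for the two hunks above: the renamed EngineHook builds on xorm's contexts.Hook interface, where BeforeProcess can return a derived context (here carrying a tracing span) and AfterProcess runs once the statement has finished and may inspect c.SQL and c.ExecuteTime. A generic, hedged sketch of that hook pattern, independent of Gitea's gtprof package:

// Sketch only: a minimal xorm hook that logs slow statements.
package example

import (
	"context"
	"log"
	"time"

	"xorm.io/xorm/contexts"
)

type timingHook struct {
	Threshold time.Duration
}

var _ contexts.Hook = (*timingHook)(nil)

func (*timingHook) BeforeProcess(c *contexts.ContextHook) (context.Context, error) {
	// Attach whatever AfterProcess will need (a span, a start time, ...).
	return c.Ctx, nil
}

func (h *timingHook) AfterProcess(c *contexts.ContextHook) error {
	if c.ExecuteTime >= h.Threshold {
		log.Printf("slow query (%v): %s", c.ExecuteTime, c.SQL)
	}
	return nil
}

A hook like this is registered on the engine the same way the diff does it, via engine.AddHook(&timingHook{Threshold: time.Second}).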
@@ -11,8 +11,6 @@ import (
"code.gitea.io/gitea/modules/util"
)

var ErrNameEmpty = util.SilentWrap{Message: "name is empty", Err: util.ErrInvalidArgument}

// ErrNameReserved represents a "reserved name" error.
type ErrNameReserved struct {
Name string
@@ -79,7 +77,7 @@ func (err ErrNameCharsNotAllowed) Unwrap() error {
func IsUsableName(reservedNames, reservedPatterns []string, name string) error {
name = strings.TrimSpace(strings.ToLower(name))
if utf8.RuneCountInString(name) == 0 {
return ErrNameEmpty
return util.SilentWrap{Message: "name is empty", Err: util.ErrInvalidArgument}
}

for i := range reservedNames {
@@ -1694,19 +1694,6 @@
is_fsck_enabled: true
close_issues_via_commit_in_any_branch: false

-
id: 59
owner_id: 2
owner_name: user2
lower_name: test_commit_revert
name: test_commit_revert
default_branch: main
is_empty: false
is_archived: false
is_private: true
status: 0
num_issues: 0

-
id: 60
owner_id: 40
@@ -67,7 +67,7 @@
num_followers: 2
num_following: 1
num_stars: 2
num_repos: 15
num_repos: 14
num_teams: 0
num_members: 0
visibility: 0
@@ -167,6 +167,9 @@ func GetBranch(ctx context.Context, repoID int64, branchName string) (*Branch, e
BranchName: branchName,
}
}
// FIXME: this design is not right: it doesn't check `branch.IsDeleted`, it doesn't make sense to make callers to check IsDeleted again and again.
// It causes inconsistency with `GetBranches` and `git.GetBranch`, and will lead to strange bugs
// In the future, there should be 2 functions: `GetBranchExisting` and `GetBranchWithDeleted`
return &branch, nil
}

@@ -440,6 +443,8 @@ type FindRecentlyPushedNewBranchesOptions struct {
}

type RecentlyPushedNewBranch struct {
BranchRepo *repo_model.Repository
BranchName string
BranchDisplayName string
BranchLink string
BranchCompareURL string
@@ -540,7 +545,9 @@ func FindRecentlyPushedNewBranches(ctx context.Context, doer *user_model.User, o
branchDisplayName = fmt.Sprintf("%s:%s", branch.Repo.FullName(), branchDisplayName)
}
newBranches = append(newBranches, &RecentlyPushedNewBranch{
BranchRepo: branch.Repo,
BranchDisplayName: branchDisplayName,
BranchName: branch.Name,
BranchLink: fmt.Sprintf("%s/src/branch/%s", branch.Repo.Link(), util.PathEscapeSegments(branch.Name)),
BranchCompareURL: branch.Repo.ComposeBranchCompareURL(opts.BaseRepo, branch.Name),
CommitTime: branch.CommitTime,
@@ -107,7 +107,7 @@ func GetIssueStats(ctx context.Context, opts *IssuesOptions) (*IssueStats, error
accum.YourRepositoriesCount += stats.YourRepositoriesCount
accum.AssignCount += stats.AssignCount
accum.CreateCount += stats.CreateCount
accum.OpenCount += stats.MentionCount
accum.MentionCount += stats.MentionCount
accum.ReviewRequestedCount += stats.ReviewRequestedCount
accum.ReviewedCount += stats.ReviewedCount
i = chunk
@@ -7,6 +7,7 @@ import (
"bytes"
"context"
"fmt"
"strings"

"code.gitea.io/gitea/models/db"
repo_model "code.gitea.io/gitea/models/repo"
@@ -321,6 +322,11 @@ func valuesUser(m map[int64]*user_model.User) []*user_model.User {
return values
}

// newMigrationOriginalUser creates and returns a fake user for external user
func newMigrationOriginalUser(name string) *user_model.User {
return &user_model.User{ID: 0, Name: name, LowerName: strings.ToLower(name)}
}

// LoadUsers loads reactions' all users
func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Repository) ([]*user_model.User, error) {
if len(list) == 0 {
@@ -338,7 +344,7 @@ func (list ReactionList) LoadUsers(ctx context.Context, repo *repo_model.Reposit

for _, reaction := range list {
if reaction.OriginalAuthor != "" {
reaction.User = user_model.NewReplaceUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name()))
reaction.User = newMigrationOriginalUser(fmt.Sprintf("%s(%s)", reaction.OriginalAuthor, repo.OriginalServiceType.Name()))
} else if user, ok := userMaps[reaction.UserID]; ok {
reaction.User = user
} else {
@@ -930,17 +930,19 @@ func MarkConversation(ctx context.Context, comment *Comment, doer *user_model.Us
}

// CanMarkConversation Add or remove Conversation mark for a code comment permission check
// the PR writer , offfcial reviewer and poster can do it
// the PR writer , official reviewer and poster can do it
func CanMarkConversation(ctx context.Context, issue *Issue, doer *user_model.User) (permResult bool, err error) {
if doer == nil || issue == nil {
return false, fmt.Errorf("issue or doer is nil")
}

if err = issue.LoadRepo(ctx); err != nil {
return false, err
}
if issue.Repo.IsArchived {
return false, nil
}
if doer.ID != issue.PosterID {
if err = issue.LoadRepo(ctx); err != nil {
return false, err
}

p, err := access_model.GetUserRepoPermission(ctx, issue.Repo, doer)
if err != nil {
return false, err
@@ -46,11 +46,6 @@ func (s Stopwatch) Seconds() int64 {
return int64(timeutil.TimeStampNow() - s.CreatedUnix)
}

// Duration returns a human-readable duration string based on local server time
func (s Stopwatch) Duration() string {
return util.SecToTime(s.Seconds())
}

func getStopwatch(ctx context.Context, userID, issueID int64) (sw *Stopwatch, exists bool, err error) {
sw = new(Stopwatch)
exists, err = db.GetEngine(ctx).
@@ -201,7 +196,7 @@ func FinishIssueStopwatch(ctx context.Context, user *user_model.User, issue *Iss
Doer: user,
Issue: issue,
Repo: issue.Repo,
Content: util.SecToTime(timediff),
Content: util.SecToHours(timediff),
Type: CommentTypeStopTracking,
TimeID: tt.ID,
}); err != nil {
models/organization/org_worktime.go (new file, 103 lines)
@@ -0,0 +1,103 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package organization

import (
"sort"

"code.gitea.io/gitea/models/db"

"xorm.io/builder"
)

type WorktimeSumByRepos struct {
RepoName string
SumTime int64
}

func GetWorktimeByRepos(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByRepos, err error) {
err = db.GetEngine(db.DefaultContext).
Select("repository.name AS repo_name, SUM(tracked_time.time) AS sum_time").
Table("tracked_time").
Join("INNER", "issue", "tracked_time.issue_id = issue.id").
Join("INNER", "repository", "issue.repo_id = repository.id").
Where(builder.Eq{"repository.owner_id": org.ID}).
And(builder.Eq{"tracked_time.deleted": false}).
And(builder.Gte{"tracked_time.created_unix": unitFrom}).
And(builder.Lte{"tracked_time.created_unix": unixTo}).
GroupBy("repository.name").
OrderBy("repository.name").
Find(&results)
return results, err
}

type WorktimeSumByMilestones struct {
RepoName string
MilestoneName string
MilestoneID int64
MilestoneDeadline int64
SumTime int64
HideRepoName bool
}

func GetWorktimeByMilestones(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMilestones, err error) {
err = db.GetEngine(db.DefaultContext).
Select("repository.name AS repo_name, milestone.name AS milestone_name, milestone.id AS milestone_id, milestone.deadline_unix as milestone_deadline, SUM(tracked_time.time) AS sum_time").
Table("tracked_time").
Join("INNER", "issue", "tracked_time.issue_id = issue.id").
Join("INNER", "repository", "issue.repo_id = repository.id").
Join("LEFT", "milestone", "issue.milestone_id = milestone.id").
Where(builder.Eq{"repository.owner_id": org.ID}).
And(builder.Eq{"tracked_time.deleted": false}).
And(builder.Gte{"tracked_time.created_unix": unitFrom}).
And(builder.Lte{"tracked_time.created_unix": unixTo}).
GroupBy("repository.name, milestone.name, milestone.deadline_unix, milestone.id").
OrderBy("repository.name, milestone.deadline_unix, milestone.id").
Find(&results)

// TODO: pgsql: NULL values are sorted last in default ascending order, so we need to sort them manually again.
sort.Slice(results, func(i, j int) bool {
if results[i].RepoName != results[j].RepoName {
return results[i].RepoName < results[j].RepoName
}
if results[i].MilestoneDeadline != results[j].MilestoneDeadline {
return results[i].MilestoneDeadline < results[j].MilestoneDeadline
}
return results[i].MilestoneID < results[j].MilestoneID
})

// Show only the first RepoName, for nicer output.
prevRepoName := ""
for i := 0; i < len(results); i++ {
res := &results[i]
res.MilestoneDeadline = 0 // clear the deadline because we do not really need it
if prevRepoName == res.RepoName {
res.HideRepoName = true
}
prevRepoName = res.RepoName
}
return results, err
}

type WorktimeSumByMembers struct {
UserName string
SumTime int64
}

func GetWorktimeByMembers(org *Organization, unitFrom, unixTo int64) (results []WorktimeSumByMembers, err error) {
err = db.GetEngine(db.DefaultContext).
Select("`user`.name AS user_name, SUM(tracked_time.time) AS sum_time").
Table("tracked_time").
Join("INNER", "issue", "tracked_time.issue_id = issue.id").
Join("INNER", "repository", "issue.repo_id = repository.id").
Join("INNER", "`user`", "tracked_time.user_id = `user`.id").
Where(builder.Eq{"repository.owner_id": org.ID}).
And(builder.Eq{"tracked_time.deleted": false}).
And(builder.Gte{"tracked_time.created_unix": unitFrom}).
And(builder.Lte{"tracked_time.created_unix": unixTo}).
GroupBy("`user`.name").
OrderBy("sum_time DESC").
Find(&results)
return results, err
}
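Hedged usage sketch for the new aggregation helpers above: summing an organization's tracked time for the last 30 days. The wrapper function and variable names are hypothetical; only the GetWorktimeBy* calls come from the file itself.

// Sketch only: print per-repository and per-member worktime for an org.
package example

import (
	"fmt"
	"time"

	"code.gitea.io/gitea/models/organization"
)

func printWorktime(org *organization.Organization) error {
	to := time.Now().Unix()
	from := time.Now().AddDate(0, 0, -30).Unix()

	byRepo, err := organization.GetWorktimeByRepos(org, from, to)
	if err != nil {
		return err
	}
	for _, r := range byRepo {
		fmt.Printf("repo %s: %d seconds tracked\n", r.RepoName, r.SumTime)
	}

	byMember, err := organization.GetWorktimeByMembers(org, from, to)
	if err != nil {
		return err
	}
	for _, m := range byMember {
		fmt.Printf("user %s: %d seconds tracked\n", m.UserName, m.SumTime)
	}
	return nil
}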
@@ -14,6 +14,7 @@ import (
"regexp"
"strconv"
"strings"
"sync"

"code.gitea.io/gitea/models/db"
"code.gitea.io/gitea/models/unit"
@@ -61,20 +62,30 @@ func (err ErrRepoIsArchived) Error() string {
return fmt.Sprintf("%s is archived", err.Repo.LogString())
}

var (
validRepoNamePattern = regexp.MustCompile(`[-.\w]+`)
invalidRepoNamePattern = regexp.MustCompile(`[.]{2,}`)
reservedRepoNames = []string{".", "..", "-"}
reservedRepoPatterns = []string{"*.git", "*.wiki", "*.rss", "*.atom"}
)
type globalVarsStruct struct {
validRepoNamePattern *regexp.Regexp
invalidRepoNamePattern *regexp.Regexp
reservedRepoNames []string
reservedRepoPatterns []string
}

var globalVars = sync.OnceValue(func() *globalVarsStruct {
return &globalVarsStruct{
validRepoNamePattern: regexp.MustCompile(`[-.\w]+`),
invalidRepoNamePattern: regexp.MustCompile(`[.]{2,}`),
reservedRepoNames: []string{".", "..", "-"},
reservedRepoPatterns: []string{"*.git", "*.wiki", "*.rss", "*.atom"},
}
})

// IsUsableRepoName returns true when name is usable
func IsUsableRepoName(name string) error {
if !validRepoNamePattern.MatchString(name) || invalidRepoNamePattern.MatchString(name) {
vars := globalVars()
if !vars.validRepoNamePattern.MatchString(name) || vars.invalidRepoNamePattern.MatchString(name) {
// Note: usually this error is normally caught up earlier in the UI
return db.ErrNameCharsNotAllowed{Name: name}
}
return db.IsUsableName(reservedRepoNames, reservedRepoPatterns, name)
return db.IsUsableName(vars.reservedRepoNames, vars.reservedRepoPatterns, name)
}

// TrustModelType defines the types of trust model for this repository
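The refactor above moves package-level regexp globals into a struct built lazily with sync.OnceValue (Go 1.21+): the closure runs exactly once, on first use, and every caller receives the same cached pointer. A standalone, hedged sketch of the same pattern with hypothetical names:

// Sketch only: lazily-initialized, shared validation patterns.
package example

import (
	"regexp"
	"sync"
)

type patterns struct {
	valid   *regexp.Regexp
	invalid *regexp.Regexp
}

var getPatterns = sync.OnceValue(func() *patterns {
	// Compiled on the first call only; later calls return the cached value.
	return &patterns{
		valid:   regexp.MustCompile(`[-.\w]+`),
		invalid: regexp.MustCompile(`[.]{2,}`),
	}
})

func isUsable(name string) bool {
	p := getPatterns()
	return p.valid.MatchString(name) && !p.invalid.MatchString(name)
}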
@@ -219,4 +219,5 @@ func TestIsUsableRepoName(t *testing.T) {
assert.Error(t, IsUsableRepoName("the..repo"))
assert.Error(t, IsUsableRepoName("foo.wiki"))
assert.Error(t, IsUsableRepoName("foo.git"))
assert.Error(t, IsUsableRepoName("foo.RSS"))
}
@@ -68,6 +68,7 @@ type RepoTransfer struct { //nolint
RecipientID int64
Recipient *user_model.User `xorm:"-"`
RepoID int64
Repo *Repository `xorm:"-"`
TeamIDs []int64
Teams []*organization.Team `xorm:"-"`

@@ -79,48 +80,65 @@ func init() {
db.RegisterModel(new(RepoTransfer))
}

// LoadAttributes fetches the transfer recipient from the database
func (r *RepoTransfer) LoadAttributes(ctx context.Context) error {
func (r *RepoTransfer) LoadRecipient(ctx context.Context) error {
if r.Recipient == nil {
u, err := user_model.GetUserByID(ctx, r.RecipientID)
if err != nil {
return err
}

r.Recipient = u
}

if r.Recipient.IsOrganization() && len(r.TeamIDs) != len(r.Teams) {
for _, v := range r.TeamIDs {
team, err := organization.GetTeamByID(ctx, v)
if err != nil {
return err
}
return nil
}

if team.OrgID != r.Recipient.ID {
return fmt.Errorf("team %d belongs not to org %d", v, r.Recipient.ID)
}
func (r *RepoTransfer) LoadRepo(ctx context.Context) error {
if r.Repo == nil {
repo, err := GetRepositoryByID(ctx, r.RepoID)
if err != nil {
return err
}
r.Repo = repo
}

return nil
}

// LoadAttributes fetches the transfer recipient from the database
func (r *RepoTransfer) LoadAttributes(ctx context.Context) error {
if err := r.LoadRecipient(ctx); err != nil {
return err
}

if r.Recipient.IsOrganization() && r.Teams == nil {
teamsMap, err := organization.GetTeamsByIDs(ctx, r.TeamIDs)
if err != nil {
return err
}
for _, team := range teamsMap {
r.Teams = append(r.Teams, team)
}
}

if err := r.LoadRepo(ctx); err != nil {
return err
}

if r.Doer == nil {
u, err := user_model.GetUserByID(ctx, r.DoerID)
if err != nil {
return err
}

r.Doer = u
}

return nil
}

// CanUserAcceptTransfer checks if the user has the rights to accept/decline a repo transfer.
// CanUserAcceptOrRejectTransfer checks if the user has the rights to accept/decline a repo transfer.
// For user, it checks if it's himself
// For organizations, it checks if the user is able to create repos
func (r *RepoTransfer) CanUserAcceptTransfer(ctx context.Context, u *user_model.User) bool {
func (r *RepoTransfer) CanUserAcceptOrRejectTransfer(ctx context.Context, u *user_model.User) bool {
if err := r.LoadAttributes(ctx); err != nil {
log.Error("LoadAttributes: %v", err)
return false
@@ -166,6 +184,10 @@ func GetPendingRepositoryTransfers(ctx context.Context, opts *PendingRepositoryT
Find(&transfers)
}

func IsRepositoryTransferExist(ctx context.Context, repoID int64) (bool, error) {
return db.GetEngine(ctx).Where("repo_id = ?", repoID).Exist(new(RepoTransfer))
}

// GetPendingRepositoryTransfer fetches the most recent and ongoing transfer
// process for the repository
func GetPendingRepositoryTransfer(ctx context.Context, repo *Repository) (*RepoTransfer, error) {
@@ -206,11 +228,26 @@ func CreatePendingRepositoryTransfer(ctx context.Context, doer, newOwner *user_m
return err
}

if _, err := user_model.GetUserByID(ctx, newOwner.ID); err != nil {
return err
}

// Make sure repo is ready to transfer
if err := TestRepositoryReadyForTransfer(repo.Status); err != nil {
return err
}

exist, err := IsRepositoryTransferExist(ctx, repo.ID)
if err != nil {
return err
}
if exist {
return ErrRepoTransferInProgress{
Uname: repo.Owner.LowerName,
Name: repo.Name,
}
}

repo.Status = RepositoryPendingTransfer
if err := UpdateRepositoryCols(ctx, repo, "status"); err != nil {
return err
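A short, hedged sketch of how the IsRepositoryTransferExist guard added above can be used by callers before starting a transfer; the wrapper name is hypothetical, while the call itself and ErrRepoTransferInProgress come from the diff.

// Sketch only: check whether a repository already has a pending transfer.
package example

import (
	"context"

	repo_model "code.gitea.io/gitea/models/repo"
)

func hasPendingTransfer(ctx context.Context, repo *repo_model.Repository) (bool, error) {
	// True while a RepoTransfer row exists for this repository, which is the
	// condition CreatePendingRepositoryTransfer now reports as
	// ErrRepoTransferInProgress.
	return repo_model.IsRepositoryTransferExist(ctx, repo.ID)
}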
@@ -45,8 +45,6 @@ func TestCreateRepositoryNotice(t *testing.T) {
unittest.AssertExistsAndLoadBean(t, noticeBean)
}

// TODO TestRemoveAllWithNotice

func TestCountNotices(t *testing.T) {
assert.NoError(t, unittest.PrepareTestDatabase())
assert.Equal(t, int64(3), system.CountNotices(db.DefaultContext))
@@ -38,27 +38,30 @@ func GenerateRandomAvatar(ctx context.Context, u *User) error {

u.Avatar = avatars.HashEmail(seed)

// Don't share the images so that we can delete them easily
if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
if err := png.Encode(w, img); err != nil {
log.Error("Encode: %v", err)
_, err = storage.Avatars.Stat(u.CustomAvatarRelativePath())
if err != nil {
// If unable to Stat the avatar file (usually it means non-existing), then try to save a new one
// Don't share the images so that we can delete them easily
if err := storage.SaveFrom(storage.Avatars, u.CustomAvatarRelativePath(), func(w io.Writer) error {
if err := png.Encode(w, img); err != nil {
log.Error("Encode: %v", err)
}
return nil
}); err != nil {
return fmt.Errorf("failed to save avatar %s: %w", u.CustomAvatarRelativePath(), err)
}
return err
}); err != nil {
return fmt.Errorf("Failed to create dir %s: %w", u.CustomAvatarRelativePath(), err)
}

if _, err := db.GetEngine(ctx).ID(u.ID).Cols("avatar").Update(u); err != nil {
return err
}

log.Info("New random avatar created: %d", u.ID)
return nil
}

// AvatarLinkWithSize returns a link to the user's avatar with size. size <= 0 means default size
func (u *User) AvatarLinkWithSize(ctx context.Context, size int) string {
if u.IsGhost() {
if u.IsGhost() || u.IsGiteaActions() {
return avatars.DefaultAvatarLink()
}

@ -4,13 +4,19 @@
|
||||
package user
|
||||
|
||||
import (
|
||||
"context"
|
||||
"io"
|
||||
"strings"
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
"code.gitea.io/gitea/models/unittest"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/storage"
|
||||
"code.gitea.io/gitea/modules/test"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestUserAvatarLink(t *testing.T) {
|
||||
@ -26,3 +32,37 @@ func TestUserAvatarLink(t *testing.T) {
|
||||
link = u.AvatarLink(db.DefaultContext)
|
||||
assert.Equal(t, "https://localhost/sub-path/avatars/avatar.png", link)
|
||||
}
|
||||
|
||||
func TestUserAvatarGenerate(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
var err error
|
||||
tmpDir := t.TempDir()
|
||||
storage.Avatars, err = storage.NewLocalStorage(context.Background(), &setting.Storage{Path: tmpDir})
|
||||
require.NoError(t, err)
|
||||
|
||||
u := unittest.AssertExistsAndLoadBean(t, &User{ID: 2})
|
||||
|
||||
// there was no avatar, generate a new one
|
||||
assert.Empty(t, u.Avatar)
|
||||
err = GenerateRandomAvatar(db.DefaultContext, u)
|
||||
require.NoError(t, err)
|
||||
assert.NotEmpty(t, u.Avatar)
|
||||
|
||||
// make sure the generated one exists
|
||||
oldAvatarPath := u.CustomAvatarRelativePath()
|
||||
_, err = storage.Avatars.Stat(u.CustomAvatarRelativePath())
|
||||
require.NoError(t, err)
|
||||
// and try to change its content
|
||||
_, err = storage.Avatars.Save(u.CustomAvatarRelativePath(), strings.NewReader("abcd"), 4)
|
||||
require.NoError(t, err)
|
||||
|
||||
// try to generate again
|
||||
err = GenerateRandomAvatar(db.DefaultContext, u)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, oldAvatarPath, u.CustomAvatarRelativePath())
|
||||
f, err := storage.Avatars.Open(u.CustomAvatarRelativePath())
|
||||
require.NoError(t, err)
|
||||
defer f.Close()
|
||||
content, _ := io.ReadAll(f)
|
||||
assert.Equal(t, "abcd", string(content))
|
||||
}
|
||||
|
@ -8,7 +8,6 @@ import (
|
||||
"context"
|
||||
"fmt"
|
||||
"net/mail"
|
||||
"regexp"
|
||||
"strings"
|
||||
"time"
|
||||
|
||||
@ -153,8 +152,6 @@ func UpdateEmailAddress(ctx context.Context, email *EmailAddress) error {
|
||||
return err
|
||||
}
|
||||
|
||||
var emailRegexp = regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$")
|
||||
|
||||
// ValidateEmail check if email is a valid & allowed address
|
||||
func ValidateEmail(email string) error {
|
||||
if err := validateEmailBasic(email); err != nil {
|
||||
@ -514,7 +511,7 @@ func validateEmailBasic(email string) error {
|
||||
return ErrEmailInvalid{email}
|
||||
}
|
||||
|
||||
if !emailRegexp.MatchString(email) {
|
||||
if !globalVars().emailRegexp.MatchString(email) {
|
||||
return ErrEmailCharIsNotSupported{email}
|
||||
}
|
||||
|
||||
@ -545,3 +542,13 @@ func IsEmailDomainAllowed(email string) bool {
|
||||
|
||||
return validation.IsEmailDomainListed(setting.Service.EmailDomainAllowList, email)
|
||||
}
|
||||
|
||||
func GetActivatedEmailAddresses(ctx context.Context, uid int64) ([]string, error) {
|
||||
emails := make([]string, 0, 2)
|
||||
if err := db.GetEngine(ctx).Table("email_address").Select("email").
|
||||
Where("uid=? AND is_activated=?", uid, true).Asc("id").
|
||||
Find(&emails); err != nil {
|
||||
return nil, err
|
||||
}
|
||||
return emails, nil
|
||||
}
|
||||
|
@ -11,9 +11,6 @@ import (
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
// ErrOpenIDNotExist openid is not known
|
||||
var ErrOpenIDNotExist = util.NewNotExistErrorf("OpenID is unknown")
|
||||
|
||||
// UserOpenID is the list of all OpenID identities of a user.
|
||||
// Since this is a middle table, name it OpenID is not suitable, so we ignore the lint here
|
||||
type UserOpenID struct { //revive:disable-line:exported
|
||||
@ -99,7 +96,7 @@ func DeleteUserOpenID(ctx context.Context, openid *UserOpenID) (err error) {
|
||||
if err != nil {
|
||||
return err
|
||||
} else if deleted != 1 {
|
||||
return ErrOpenIDNotExist
|
||||
return util.NewNotExistErrorf("OpenID is unknown")
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
@ -14,6 +14,7 @@ import (
|
||||
"path/filepath"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
"time"
|
||||
"unicode"
|
||||
|
||||
@ -213,7 +214,7 @@ func (u *User) GetPlaceholderEmail() string {
|
||||
return fmt.Sprintf("%s@%s", u.LowerName, setting.Service.NoReplyAddress)
|
||||
}
|
||||
|
||||
// GetEmail returns an noreply email, if the user has set to keep his
|
||||
// GetEmail returns a noreply email, if the user has set to keep his
|
||||
// email address private, otherwise the primary email address.
|
||||
func (u *User) GetEmail() string {
|
||||
if u.KeepEmailPrivate {
|
||||
@ -417,19 +418,9 @@ func (u *User) DisplayName() string {
|
||||
return u.Name
|
||||
}
|
||||
|
||||
var emailToReplacer = strings.NewReplacer(
|
||||
"\n", "",
|
||||
"\r", "",
|
||||
"<", "",
|
||||
">", "",
|
||||
",", "",
|
||||
":", "",
|
||||
";", "",
|
||||
)
|
||||
|
||||
// EmailTo returns a string suitable to be put into a e-mail `To:` header.
|
||||
func (u *User) EmailTo() string {
|
||||
sanitizedDisplayName := emailToReplacer.Replace(u.DisplayName())
|
||||
sanitizedDisplayName := globalVars().emailToReplacer.Replace(u.DisplayName())
|
||||
|
||||
// should be an edge case but nice to have
|
||||
if sanitizedDisplayName == u.Email {
|
||||
@ -502,10 +493,10 @@ func (u *User) IsMailable() bool {
|
||||
return u.IsActive
|
||||
}
|
||||
|
||||
// IsUserExist checks if given user name exist,
|
||||
// the user name should be noncased unique.
|
||||
// IsUserExist checks if given username exist,
|
||||
// the username should be non-cased unique.
|
||||
// If uid is presented, then check will rule out that one,
|
||||
// it is used when update a user name in settings page.
|
||||
// it is used when update a username in settings page.
|
||||
func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) {
|
||||
if len(name) == 0 {
|
||||
return false, nil
|
||||
@ -515,7 +506,7 @@ func IsUserExist(ctx context.Context, uid int64, name string) (bool, error) {
|
||||
Get(&User{LowerName: strings.ToLower(name)})
|
||||
}
|
||||
|
||||
// Note: As of the beginning of 2022, it is recommended to use at least
|
||||
// SaltByteLength as of the beginning of 2022, it is recommended to use at least
|
||||
// 64 bits of salt, but NIST is already recommending to use to 128 bits.
|
||||
// (16 bytes = 16 * 8 = 128 bits)
|
||||
const SaltByteLength = 16
|
||||
@ -526,28 +517,58 @@ func GetUserSalt() (string, error) {
|
||||
if err != nil {
|
||||
return "", err
|
||||
}
|
||||
// Returns a 32 bytes long string.
|
||||
// Returns a 32-byte long string.
|
||||
return hex.EncodeToString(rBytes), nil
|
||||
}
|
||||
|
||||
// Note: The set of characters here can safely expand without a breaking change,
|
||||
// but characters removed from this set can cause user account linking to break
|
||||
var (
|
||||
customCharsReplacement = strings.NewReplacer("Æ", "AE")
|
||||
removeCharsRE = regexp.MustCompile("['`´]")
|
||||
transformDiacritics = transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC)
|
||||
replaceCharsHyphenRE = regexp.MustCompile(`[\s~+]`)
|
||||
)
|
||||
type globalVarsStruct struct {
|
||||
customCharsReplacement *strings.Replacer
|
||||
removeCharsRE *regexp.Regexp
|
||||
transformDiacritics transform.Transformer
|
||||
replaceCharsHyphenRE *regexp.Regexp
|
||||
emailToReplacer *strings.Replacer
|
||||
emailRegexp *regexp.Regexp
|
||||
systemUserNewFuncs map[int64]func() *User
|
||||
}
|
||||
|
||||
var globalVars = sync.OnceValue(func() *globalVarsStruct {
|
||||
return &globalVarsStruct{
|
||||
// Note: The set of characters here can safely expand without a breaking change,
|
||||
// but characters removed from this set can cause user account linking to break
|
||||
customCharsReplacement: strings.NewReplacer("Æ", "AE"),
|
||||
|
||||
removeCharsRE: regexp.MustCompile("['`´]"),
|
||||
transformDiacritics: transform.Chain(norm.NFD, runes.Remove(runes.In(unicode.Mn)), norm.NFC),
|
||||
replaceCharsHyphenRE: regexp.MustCompile(`[\s~+]`),
|
||||
|
||||
emailToReplacer: strings.NewReplacer(
|
||||
"\n", "",
|
||||
"\r", "",
|
||||
"<", "",
|
||||
">", "",
|
||||
",", "",
|
||||
":", "",
|
||||
";", "",
|
||||
),
|
||||
emailRegexp: regexp.MustCompile("^[a-zA-Z0-9.!#$%&'*+-/=?^_`{|}~]*@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)*$"),
|
||||
|
||||
systemUserNewFuncs: map[int64]func() *User{
|
||||
GhostUserID: NewGhostUser,
|
||||
ActionsUserID: NewActionsUser,
|
||||
},
|
||||
}
|
||||
})
|
||||
|
||||
// NormalizeUserName only takes the name part if it is an email address, transforms it diacritics to ASCII characters.
|
||||
// It returns a string with the single-quotes removed, and any other non-supported username characters are replaced with a `-` character
|
||||
func NormalizeUserName(s string) (string, error) {
|
||||
vars := globalVars()
|
||||
s, _, _ = strings.Cut(s, "@")
|
||||
strDiacriticsRemoved, n, err := transform.String(transformDiacritics, customCharsReplacement.Replace(s))
|
||||
strDiacriticsRemoved, n, err := transform.String(vars.transformDiacritics, vars.customCharsReplacement.Replace(s))
|
||||
if err != nil {
|
||||
return "", fmt.Errorf("failed to normalize the string of provided username %q at position %d", s, n)
|
||||
}
|
||||
return replaceCharsHyphenRE.ReplaceAllLiteralString(removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil
|
||||
return vars.replaceCharsHyphenRE.ReplaceAllLiteralString(vars.removeCharsRE.ReplaceAllLiteralString(strDiacriticsRemoved, ""), "-"), nil
|
||||
}
|
||||
|
||||
var (
|
||||
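The hunk above folds the package-level regexps and replacers into one lazily initialized globalVarsStruct. A self-contained sketch of the same sync.OnceValue pattern, with illustrative names rather than Gitea's:

package example

import (
	"regexp"
	"strings"
	"sync"
)

type lazyVars struct {
	emailToReplacer *strings.Replacer
	emailRegexp     *regexp.Regexp
}

// globalVars compiles the expensive values exactly once, on first use; every
// caller afterwards gets the same fully initialized instance.
var globalVars = sync.OnceValue(func() *lazyVars {
	return &lazyVars{
		emailToReplacer: strings.NewReplacer("\n", "", "\r", ""),
		emailRegexp:     regexp.MustCompile(`^[^@\s]+@[^@\s]+$`), // simplified pattern for illustration
	}
})

func sanitizeEmailTo(s string) string {
	return globalVars().emailToReplacer.Replace(s)
}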
@ -963,30 +984,28 @@ func GetUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
||||
return users, err
|
||||
}
|
||||
|
||||
// GetPossibleUserByID returns the user if id > 0 or return system usrs if id < 0
|
||||
// GetPossibleUserByID returns the user if id > 0 or returns system user if id < 0
|
||||
func GetPossibleUserByID(ctx context.Context, id int64) (*User, error) {
|
||||
switch id {
|
||||
case GhostUserID:
|
||||
return NewGhostUser(), nil
|
||||
case ActionsUserID:
|
||||
return NewActionsUser(), nil
|
||||
case 0:
|
||||
if id < 0 {
|
||||
if newFunc, ok := globalVars().systemUserNewFuncs[id]; ok {
|
||||
return newFunc(), nil
|
||||
}
|
||||
return nil, ErrUserNotExist{UID: id}
|
||||
} else if id == 0 {
|
||||
return nil, ErrUserNotExist{}
|
||||
default:
|
||||
return GetUserByID(ctx, id)
|
||||
}
|
||||
return GetUserByID(ctx, id)
|
||||
}
|
||||
|
||||
// GetPossibleUserByIDs returns the users if id > 0 or return system users if id < 0
|
||||
// GetPossibleUserByIDs returns the users if id > 0 or returns system users if id < 0
|
||||
func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
||||
uniqueIDs := container.SetOf(ids...)
|
||||
users := make([]*User, 0, len(ids))
|
||||
_ = uniqueIDs.Remove(0)
|
||||
if uniqueIDs.Remove(GhostUserID) {
|
||||
users = append(users, NewGhostUser())
|
||||
}
|
||||
if uniqueIDs.Remove(ActionsUserID) {
|
||||
users = append(users, NewActionsUser())
|
||||
for systemUID, newFunc := range globalVars().systemUserNewFuncs {
|
||||
if uniqueIDs.Remove(systemUID) {
|
||||
users = append(users, newFunc())
|
||||
}
|
||||
}
|
||||
res, err := GetUserByIDs(ctx, uniqueIDs.Values())
|
||||
if err != nil {
|
||||
@ -996,7 +1015,7 @@ func GetPossibleUserByIDs(ctx context.Context, ids []int64) ([]*User, error) {
|
||||
return users, nil
|
||||
}
|
||||
|
||||
// GetUserByNameCtx returns user by given name.
|
||||
// GetUserByName returns user by given name.
|
||||
func GetUserByName(ctx context.Context, name string) (*User, error) {
|
||||
if len(name) == 0 {
|
||||
return nil, ErrUserNotExist{Name: name}
|
||||
@ -1027,8 +1046,8 @@ func GetUserEmailsByNames(ctx context.Context, names []string) []string {
|
||||
return mails
|
||||
}
|
||||
|
||||
// GetMaileableUsersByIDs gets users from ids, but only if they can receive mails
|
||||
func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) {
|
||||
// GetMailableUsersByIDs gets users from ids, but only if they can receive mails
|
||||
func GetMailableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([]*User, error) {
|
||||
if len(ids) == 0 {
|
||||
return nil, nil
|
||||
}
|
||||
@ -1053,17 +1072,6 @@ func GetMaileableUsersByIDs(ctx context.Context, ids []int64, isMention bool) ([
|
||||
Find(&ous)
|
||||
}
|
||||
|
||||
// GetUserNamesByIDs returns usernames for all resolved users from a list of Ids.
|
||||
func GetUserNamesByIDs(ctx context.Context, ids []int64) ([]string, error) {
|
||||
unames := make([]string, 0, len(ids))
|
||||
err := db.GetEngine(ctx).In("id", ids).
|
||||
Table("user").
|
||||
Asc("name").
|
||||
Cols("name").
|
||||
Find(&unames)
|
||||
return unames, err
|
||||
}
|
||||
|
||||
// GetUserNameByID returns username for the id
|
||||
func GetUserNameByID(ctx context.Context, id int64) (string, error) {
|
||||
var name string
|
||||
|
@ -10,9 +10,8 @@ import (
|
||||
)
|
||||
|
||||
const (
|
||||
GhostUserID = -1
|
||||
GhostUserName = "Ghost"
|
||||
GhostUserLowerName = "ghost"
|
||||
GhostUserID = -1
|
||||
GhostUserName = "Ghost"
|
||||
)
|
||||
|
||||
// NewGhostUser creates and returns a fake user for someone has deleted their account.
|
||||
@ -20,10 +19,14 @@ func NewGhostUser() *User {
|
||||
return &User{
|
||||
ID: GhostUserID,
|
||||
Name: GhostUserName,
|
||||
LowerName: GhostUserLowerName,
|
||||
LowerName: strings.ToLower(GhostUserName),
|
||||
}
|
||||
}
|
||||
|
||||
func IsGhostUserName(name string) bool {
|
||||
return strings.EqualFold(name, GhostUserName)
|
||||
}
|
||||
|
||||
// IsGhost check if user is fake user for a deleted account
|
||||
func (u *User) IsGhost() bool {
|
||||
if u == nil {
|
||||
@ -32,22 +35,16 @@ func (u *User) IsGhost() bool {
|
||||
return u.ID == GhostUserID && u.Name == GhostUserName
|
||||
}
|
||||
|
||||
// NewReplaceUser creates and returns a fake user for external user
|
||||
func NewReplaceUser(name string) *User {
|
||||
return &User{
|
||||
ID: 0,
|
||||
Name: name,
|
||||
LowerName: strings.ToLower(name),
|
||||
}
|
||||
}
|
||||
|
||||
const (
|
||||
ActionsUserID = -2
|
||||
ActionsUserName = "gitea-actions"
|
||||
ActionsFullName = "Gitea Actions"
|
||||
ActionsEmail = "teabot@gitea.io"
|
||||
ActionsUserID = -2
|
||||
ActionsUserName = "gitea-actions"
|
||||
ActionsUserEmail = "teabot@gitea.io"
|
||||
)
|
||||
|
||||
func IsGiteaActionsUserName(name string) bool {
|
||||
return strings.EqualFold(name, ActionsUserName)
|
||||
}
|
||||
|
||||
// NewActionsUser creates and returns a fake user for running the actions.
|
||||
func NewActionsUser() *User {
|
||||
return &User{
|
||||
@ -55,8 +52,8 @@ func NewActionsUser() *User {
|
||||
Name: ActionsUserName,
|
||||
LowerName: ActionsUserName,
|
||||
IsActive: true,
|
||||
FullName: ActionsFullName,
|
||||
Email: ActionsEmail,
|
||||
FullName: "Gitea Actions",
|
||||
Email: ActionsUserEmail,
|
||||
KeepEmailPrivate: true,
|
||||
LoginName: ActionsUserName,
|
||||
Type: UserTypeIndividual,
|
||||
@ -65,6 +62,16 @@ func NewActionsUser() *User {
|
||||
}
|
||||
}
|
||||
|
||||
func (u *User) IsActions() bool {
|
||||
func (u *User) IsGiteaActions() bool {
|
||||
return u != nil && u.ID == ActionsUserID
|
||||
}
|
||||
|
||||
func GetSystemUserByName(name string) *User {
|
||||
if IsGhostUserName(name) {
|
||||
return NewGhostUser()
|
||||
}
|
||||
if IsGiteaActionsUserName(name) {
|
||||
return NewActionsUser()
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
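GetSystemUserByName and the systemUserNewFuncs map above mean virtual users (Ghost, gitea-actions) resolve without touching the database. A hedged sketch of a caller; lookupUser itself is hypothetical:

package example

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
)

// lookupUser resolves a name to a user, checking the system users first.
// The name comparison is case-insensitive ("gHost" matches the Ghost user).
func lookupUser(ctx context.Context, name string) (*user_model.User, error) {
	if su := user_model.GetSystemUserByName(name); su != nil {
		return su, nil
	}
	return user_model.GetUserByName(ctx, name)
}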
32
models/user/user_system_test.go
Normal file
@ -0,0 +1,32 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package user
|
||||
|
||||
import (
|
||||
"testing"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
"github.com/stretchr/testify/require"
|
||||
)
|
||||
|
||||
func TestSystemUser(t *testing.T) {
|
||||
u, err := GetPossibleUserByID(db.DefaultContext, -1)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "Ghost", u.Name)
|
||||
assert.Equal(t, "ghost", u.LowerName)
|
||||
assert.True(t, u.IsGhost())
|
||||
assert.True(t, IsGhostUserName("gHost"))
|
||||
|
||||
u, err = GetPossibleUserByID(db.DefaultContext, -2)
|
||||
require.NoError(t, err)
|
||||
assert.Equal(t, "gitea-actions", u.Name)
|
||||
assert.Equal(t, "gitea-actions", u.LowerName)
|
||||
assert.True(t, u.IsGiteaActions())
|
||||
assert.True(t, IsGiteaActionsUserName("Gitea-actionS"))
|
||||
|
||||
_, err = GetPossibleUserByID(db.DefaultContext, -3)
|
||||
require.Error(t, err)
|
||||
}
|
@ -25,6 +25,21 @@ import (
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
func TestIsUsableUsername(t *testing.T) {
|
||||
assert.NoError(t, user_model.IsUsableUsername("a"))
|
||||
assert.NoError(t, user_model.IsUsableUsername("foo.wiki"))
|
||||
assert.NoError(t, user_model.IsUsableUsername("foo.git"))
|
||||
|
||||
assert.Error(t, user_model.IsUsableUsername("a--b"))
|
||||
assert.Error(t, user_model.IsUsableUsername("-1_."))
|
||||
assert.Error(t, user_model.IsUsableUsername(".profile"))
|
||||
assert.Error(t, user_model.IsUsableUsername("-"))
|
||||
assert.Error(t, user_model.IsUsableUsername("🌞"))
|
||||
assert.Error(t, user_model.IsUsableUsername("the..repo"))
|
||||
assert.Error(t, user_model.IsUsableUsername("foo.RSS"))
|
||||
assert.Error(t, user_model.IsUsableUsername("foo.PnG"))
|
||||
}
|
||||
|
||||
func TestOAuth2Application_LoadUser(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
app := unittest.AssertExistsAndLoadBean(t, &auth.OAuth2Application{ID: 1})
|
||||
@ -318,14 +333,14 @@ func TestGetUserIDsByNames(t *testing.T) {
|
||||
func TestGetMaileableUsersByIDs(t *testing.T) {
|
||||
assert.NoError(t, unittest.PrepareTestDatabase())
|
||||
|
||||
results, err := user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, false)
|
||||
results, err := user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, false)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, results, 1)
|
||||
if len(results) > 1 {
|
||||
assert.Equal(t, 1, results[0].ID)
|
||||
}
|
||||
|
||||
results, err = user_model.GetMaileableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
|
||||
results, err = user_model.GetMailableUsersByIDs(db.DefaultContext, []int64{1, 4}, true)
|
||||
assert.NoError(t, err)
|
||||
assert.Len(t, results, 2)
|
||||
if len(results) > 2 {
|
||||
|
@ -167,186 +167,39 @@ func (w *Webhook) UpdateEvent() error {
|
||||
return err
|
||||
}
|
||||
|
||||
// HasCreateEvent returns true if hook enabled create event.
|
||||
func (w *Webhook) HasCreateEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Create)
|
||||
}
|
||||
|
||||
// HasDeleteEvent returns true if hook enabled delete event.
|
||||
func (w *Webhook) HasDeleteEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Delete)
|
||||
}
|
||||
|
||||
// HasForkEvent returns true if hook enabled fork event.
|
||||
func (w *Webhook) HasForkEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Fork)
|
||||
}
|
||||
|
||||
// HasIssuesEvent returns true if hook enabled issues event.
|
||||
func (w *Webhook) HasIssuesEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Issues)
|
||||
}
|
||||
|
||||
// HasIssuesAssignEvent returns true if hook enabled issues assign event.
|
||||
func (w *Webhook) HasIssuesAssignEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.IssueAssign)
|
||||
}
|
||||
|
||||
// HasIssuesLabelEvent returns true if hook enabled issues label event.
|
||||
func (w *Webhook) HasIssuesLabelEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.IssueLabel)
|
||||
}
|
||||
|
||||
// HasIssuesMilestoneEvent returns true if hook enabled issues milestone event.
|
||||
func (w *Webhook) HasIssuesMilestoneEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.IssueMilestone)
|
||||
}
|
||||
|
||||
// HasIssueCommentEvent returns true if hook enabled issue_comment event.
|
||||
func (w *Webhook) HasIssueCommentEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.IssueComment)
|
||||
}
|
||||
|
||||
// HasPushEvent returns true if hook enabled push event.
|
||||
func (w *Webhook) HasPushEvent() bool {
|
||||
return w.PushOnly || w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Push)
|
||||
}
|
||||
|
||||
// HasPullRequestEvent returns true if hook enabled pull request event.
|
||||
func (w *Webhook) HasPullRequestEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequest)
|
||||
}
|
||||
|
||||
// HasPullRequestAssignEvent returns true if hook enabled pull request assign event.
|
||||
func (w *Webhook) HasPullRequestAssignEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestAssign)
|
||||
}
|
||||
|
||||
// HasPullRequestLabelEvent returns true if hook enabled pull request label event.
|
||||
func (w *Webhook) HasPullRequestLabelEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestLabel)
|
||||
}
|
||||
|
||||
// HasPullRequestMilestoneEvent returns true if hook enabled pull request milestone event.
|
||||
func (w *Webhook) HasPullRequestMilestoneEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestMilestone)
|
||||
}
|
||||
|
||||
// HasPullRequestCommentEvent returns true if hook enabled pull_request_comment event.
|
||||
func (w *Webhook) HasPullRequestCommentEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestComment)
|
||||
}
|
||||
|
||||
// HasPullRequestApprovedEvent returns true if hook enabled pull request review event.
|
||||
func (w *Webhook) HasPullRequestApprovedEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestReview)
|
||||
}
|
||||
|
||||
// HasPullRequestRejectedEvent returns true if hook enabled pull request review event.
|
||||
func (w *Webhook) HasPullRequestRejectedEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestReview)
|
||||
}
|
||||
|
||||
// HasPullRequestReviewCommentEvent returns true if hook enabled pull request review event.
|
||||
func (w *Webhook) HasPullRequestReviewCommentEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestReview)
|
||||
}
|
||||
|
||||
// HasPullRequestSyncEvent returns true if hook enabled pull request sync event.
|
||||
func (w *Webhook) HasPullRequestSyncEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestSync)
|
||||
}
|
||||
|
||||
// HasWikiEvent returns true if hook enabled wiki event.
|
||||
func (w *Webhook) HasWikiEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvent.Wiki)
|
||||
}
|
||||
|
||||
// HasReleaseEvent returns if hook enabled release event.
|
||||
func (w *Webhook) HasReleaseEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Release)
|
||||
}
|
||||
|
||||
// HasRepositoryEvent returns if hook enabled repository event.
|
||||
func (w *Webhook) HasRepositoryEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Repository)
|
||||
}
|
||||
|
||||
// HasPackageEvent returns if hook enabled package event.
|
||||
func (w *Webhook) HasPackageEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.Package)
|
||||
}
|
||||
|
||||
// HasPullRequestReviewRequestEvent returns true if hook enabled pull request review request event.
|
||||
func (w *Webhook) HasPullRequestReviewRequestEvent() bool {
|
||||
return w.SendEverything ||
|
||||
(w.ChooseEvents && w.HookEvents.PullRequestReviewRequest)
|
||||
}
|
||||
|
||||
// EventCheckers returns event checkers
|
||||
func (w *Webhook) EventCheckers() []struct {
|
||||
Has func() bool
|
||||
Type webhook_module.HookEventType
|
||||
} {
|
||||
return []struct {
|
||||
Has func() bool
|
||||
Type webhook_module.HookEventType
|
||||
}{
|
||||
{w.HasCreateEvent, webhook_module.HookEventCreate},
|
||||
{w.HasDeleteEvent, webhook_module.HookEventDelete},
|
||||
{w.HasForkEvent, webhook_module.HookEventFork},
|
||||
{w.HasPushEvent, webhook_module.HookEventPush},
|
||||
{w.HasIssuesEvent, webhook_module.HookEventIssues},
|
||||
{w.HasIssuesAssignEvent, webhook_module.HookEventIssueAssign},
|
||||
{w.HasIssuesLabelEvent, webhook_module.HookEventIssueLabel},
|
||||
{w.HasIssuesMilestoneEvent, webhook_module.HookEventIssueMilestone},
|
||||
{w.HasIssueCommentEvent, webhook_module.HookEventIssueComment},
|
||||
{w.HasPullRequestEvent, webhook_module.HookEventPullRequest},
|
||||
{w.HasPullRequestAssignEvent, webhook_module.HookEventPullRequestAssign},
|
||||
{w.HasPullRequestLabelEvent, webhook_module.HookEventPullRequestLabel},
|
||||
{w.HasPullRequestMilestoneEvent, webhook_module.HookEventPullRequestMilestone},
|
||||
{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestComment},
|
||||
{w.HasPullRequestApprovedEvent, webhook_module.HookEventPullRequestReviewApproved},
|
||||
{w.HasPullRequestRejectedEvent, webhook_module.HookEventPullRequestReviewRejected},
|
||||
{w.HasPullRequestCommentEvent, webhook_module.HookEventPullRequestReviewComment},
|
||||
{w.HasPullRequestSyncEvent, webhook_module.HookEventPullRequestSync},
|
||||
{w.HasWikiEvent, webhook_module.HookEventWiki},
|
||||
{w.HasRepositoryEvent, webhook_module.HookEventRepository},
|
||||
{w.HasReleaseEvent, webhook_module.HookEventRelease},
|
||||
{w.HasPackageEvent, webhook_module.HookEventPackage},
|
||||
{w.HasPullRequestReviewRequestEvent, webhook_module.HookEventPullRequestReviewRequest},
|
||||
func (w *Webhook) HasEvent(evt webhook_module.HookEventType) bool {
|
||||
if w.SendEverything {
|
||||
return true
|
||||
}
|
||||
if w.PushOnly {
|
||||
return evt == webhook_module.HookEventPush
|
||||
}
|
||||
checkEvt := evt
|
||||
switch evt {
|
||||
case webhook_module.HookEventPullRequestReviewApproved, webhook_module.HookEventPullRequestReviewRejected, webhook_module.HookEventPullRequestReviewComment:
|
||||
checkEvt = webhook_module.HookEventPullRequestReview
|
||||
}
|
||||
return w.HookEvents[checkEvt]
|
||||
}
|
||||
|
||||
// EventsArray returns an array of hook events
|
||||
func (w *Webhook) EventsArray() []string {
|
||||
events := make([]string, 0, 7)
|
||||
if w.SendEverything {
|
||||
events := make([]string, 0, len(webhook_module.AllEvents()))
|
||||
for _, evt := range webhook_module.AllEvents() {
|
||||
events = append(events, string(evt))
|
||||
}
|
||||
return events
|
||||
}
|
||||
|
||||
for _, c := range w.EventCheckers() {
|
||||
if c.Has() {
|
||||
events = append(events, string(c.Type))
|
||||
if w.PushOnly {
|
||||
return []string{string(webhook_module.HookEventPush)}
|
||||
}
|
||||
|
||||
events := make([]string, 0, len(w.HookEvents))
|
||||
for event, enabled := range w.HookEvents {
|
||||
if enabled {
|
||||
events = append(events, string(event))
|
||||
}
|
||||
}
|
||||
return events
|
||||
|
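With HookEvents turned into a map keyed by HookEventType, the per-event Has*Event helpers collapse into the single HasEvent method above. A minimal sketch of a delivery-side check; shouldDeliver is illustrative and assumes the Webhook's IsActive field:

package example

import (
	webhook_model "code.gitea.io/gitea/models/webhook"
	webhook_module "code.gitea.io/gitea/modules/webhook"
)

// shouldDeliver reports whether an active hook subscribes to the event,
// relying on HasEvent to honour SendEverything, PushOnly and the review-event grouping.
func shouldDeliver(w *webhook_model.Webhook, evt webhook_module.HookEventType) bool {
	return w.IsActive && w.HasEvent(evt)
}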
@ -54,9 +54,9 @@ func TestWebhook_UpdateEvent(t *testing.T) {
|
||||
SendEverything: false,
|
||||
ChooseEvents: false,
|
||||
HookEvents: webhook_module.HookEvents{
|
||||
Create: false,
|
||||
Push: true,
|
||||
PullRequest: false,
|
||||
webhook_module.HookEventCreate: false,
|
||||
webhook_module.HookEventPush: true,
|
||||
webhook_module.HookEventPullRequest: false,
|
||||
},
|
||||
}
|
||||
webhook.HookEvent = hookEvent
|
||||
@ -68,13 +68,13 @@ func TestWebhook_UpdateEvent(t *testing.T) {
|
||||
}
|
||||
|
||||
func TestWebhook_EventsArray(t *testing.T) {
|
||||
assert.Equal(t, []string{
|
||||
assert.EqualValues(t, []string{
|
||||
"create", "delete", "fork", "push",
|
||||
"issues", "issue_assign", "issue_label", "issue_milestone", "issue_comment",
|
||||
"pull_request", "pull_request_assign", "pull_request_label", "pull_request_milestone",
|
||||
"pull_request_comment", "pull_request_review_approved", "pull_request_review_rejected",
|
||||
"pull_request_review_comment", "pull_request_sync", "wiki", "repository", "release",
|
||||
"package", "pull_request_review_request",
|
||||
"pull_request_review_comment", "pull_request_sync", "pull_request_review_request", "wiki", "repository", "release",
|
||||
"package", "status",
|
||||
},
|
||||
(&Webhook{
|
||||
HookEvent: &webhook_module.HookEvent{SendEverything: true},
|
||||
|
@ -18,7 +18,6 @@ import (
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/git"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
|
||||
@ -64,10 +63,7 @@ func VerifyTimeLimitCode(now time.Time, data string, minutes int, code string) b
|
||||
// check code
|
||||
retCode := CreateTimeLimitCode(data, aliveTime, startTimeStr, nil)
|
||||
if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
|
||||
retCode = CreateTimeLimitCode(data, aliveTime, startTimeStr, sha1.New()) // TODO: this is only for the support of legacy codes, remove this in/after 1.23
|
||||
if subtle.ConstantTimeCompare([]byte(retCode), []byte(code)) != 1 {
|
||||
return false
|
||||
}
|
||||
return false
|
||||
}
|
||||
|
||||
// check time is expired or not: startTime <= now && now < startTime + minutes
|
||||
@ -144,13 +140,12 @@ func Int64sToStrings(ints []int64) []string {
|
||||
return strs
|
||||
}
|
||||
|
||||
// EntryIcon returns the octicon class for displaying files/directories
|
||||
// EntryIcon returns the octicon name for displaying files/directories
|
||||
func EntryIcon(entry *git.TreeEntry) string {
|
||||
switch {
|
||||
case entry.IsLink():
|
||||
te, err := entry.FollowLink()
|
||||
if err != nil {
|
||||
log.Debug(err.Error())
|
||||
return "file-symlink-file"
|
||||
}
|
||||
if te.IsDir() {
|
||||
|
@ -86,13 +86,10 @@ JWT_SECRET = %s
|
||||
verifyDataCode := func(c string) bool {
|
||||
return VerifyTimeLimitCode(now, "data", 2, c)
|
||||
}
|
||||
code1 := CreateTimeLimitCode("data", 2, now, sha1.New())
|
||||
code2 := CreateTimeLimitCode("data", 2, now, nil)
|
||||
assert.True(t, verifyDataCode(code1))
|
||||
assert.True(t, verifyDataCode(code2))
|
||||
code := CreateTimeLimitCode("data", 2, now, nil)
|
||||
assert.True(t, verifyDataCode(code))
|
||||
initGeneralSecret("000_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko")
|
||||
assert.False(t, verifyDataCode(code1))
|
||||
assert.False(t, verifyDataCode(code2))
|
||||
assert.False(t, verifyDataCode(code))
|
||||
})
|
||||
}
|
||||
|
||||
@ -137,5 +134,3 @@ func TestInt64sToStrings(t *testing.T) {
|
||||
Int64sToStrings([]int64{1, 4, 16, 64, 256}),
|
||||
)
|
||||
}
|
||||
|
||||
// TODO: Test EntryIcon
|
||||
|
@ -18,6 +18,7 @@ import (
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/git/internal" //nolint:depguard // only this file can use the internal type CmdArg, other files and packages should use AddXxx functions
|
||||
"code.gitea.io/gitea/modules/gtprof"
|
||||
"code.gitea.io/gitea/modules/log"
|
||||
"code.gitea.io/gitea/modules/process"
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
@ -54,7 +55,7 @@ func logArgSanitize(arg string) string {
|
||||
} else if filepath.IsAbs(arg) {
|
||||
base := filepath.Base(arg)
|
||||
dir := filepath.Dir(arg)
|
||||
return filepath.Join(filepath.Base(dir), base)
|
||||
return ".../" + filepath.Join(filepath.Base(dir), base)
|
||||
}
|
||||
return arg
|
||||
}
|
||||
@ -295,15 +296,20 @@ func (c *Command) run(skip int, opts *RunOpts) error {
|
||||
timeout = defaultCommandExecutionTimeout
|
||||
}
|
||||
|
||||
var desc string
|
||||
cmdLogString := c.LogString()
|
||||
callerInfo := util.CallerFuncName(1 /* util */ + 1 /* this */ + skip /* parent */)
|
||||
if pos := strings.LastIndex(callerInfo, "/"); pos >= 0 {
|
||||
callerInfo = callerInfo[pos+1:]
|
||||
}
|
||||
// these logs are for debugging purposes only, so no guarantee of correctness or stability
|
||||
desc = fmt.Sprintf("git.Run(by:%s, repo:%s): %s", callerInfo, logArgSanitize(opts.Dir), c.LogString())
|
||||
desc := fmt.Sprintf("git.Run(by:%s, repo:%s): %s", callerInfo, logArgSanitize(opts.Dir), cmdLogString)
|
||||
log.Debug("git.Command: %s", desc)
|
||||
|
||||
_, span := gtprof.GetTracer().Start(c.parentContext, gtprof.TraceSpanGitRun)
|
||||
defer span.End()
|
||||
span.SetAttributeString(gtprof.TraceAttrFuncCaller, callerInfo)
|
||||
span.SetAttributeString(gtprof.TraceAttrGitCommand, cmdLogString)
|
||||
|
||||
var ctx context.Context
|
||||
var cancel context.CancelFunc
|
||||
var finished context.CancelFunc
|
||||
|
@ -58,5 +58,5 @@ func TestCommandString(t *testing.T) {
|
||||
assert.EqualValues(t, cmd.prog+` a "-m msg" "it's a test" "say \"hello\""`, cmd.LogString())
|
||||
|
||||
cmd = NewCommandContextNoGlobals(context.Background(), "url: https://a:b@c/", "/root/dir-a/dir-b")
|
||||
assert.EqualValues(t, cmd.prog+` "url: https://sanitized-credential@c/" dir-a/dir-b`, cmd.LogString())
|
||||
assert.EqualValues(t, cmd.prog+` "url: https://sanitized-credential@c/" .../dir-a/dir-b`, cmd.LogString())
|
||||
}
|
||||
|
@ -46,9 +46,9 @@ func (sf *CommitSubmoduleFile) SubmoduleWebLink(ctx context.Context, optCommitID
|
||||
if len(optCommitID) == 2 {
|
||||
commitLink = sf.repoLink + "/compare/" + optCommitID[0] + "..." + optCommitID[1]
|
||||
} else if len(optCommitID) == 1 {
|
||||
commitLink = sf.repoLink + "/commit/" + optCommitID[0]
|
||||
commitLink = sf.repoLink + "/tree/" + optCommitID[0]
|
||||
} else {
|
||||
commitLink = sf.repoLink + "/commit/" + sf.refID
|
||||
commitLink = sf.repoLink + "/tree/" + sf.refID
|
||||
}
|
||||
return &SubmoduleWebLink{RepoWebLink: sf.repoLink, CommitWebLink: commitLink}
|
||||
}
|
||||
|
@ -15,11 +15,11 @@ func TestCommitSubmoduleLink(t *testing.T) {
|
||||
|
||||
wl := sf.SubmoduleWebLink(context.Background())
|
||||
assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
|
||||
assert.Equal(t, "https://github.com/user/repo/commit/aaaa", wl.CommitWebLink)
|
||||
assert.Equal(t, "https://github.com/user/repo/tree/aaaa", wl.CommitWebLink)
|
||||
|
||||
wl = sf.SubmoduleWebLink(context.Background(), "1111")
|
||||
assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
|
||||
assert.Equal(t, "https://github.com/user/repo/commit/1111", wl.CommitWebLink)
|
||||
assert.Equal(t, "https://github.com/user/repo/tree/1111", wl.CommitWebLink)
|
||||
|
||||
wl = sf.SubmoduleWebLink(context.Background(), "1111", "2222")
|
||||
assert.Equal(t, "https://github.com/user/repo", wl.RepoWebLink)
|
||||
|
@ -357,5 +357,5 @@ func Test_GetCommitBranchStart(t *testing.T) {
|
||||
startCommitID, err := repo.GetCommitBranchStart(os.Environ(), "branch1", commit.ID.String())
|
||||
assert.NoError(t, err)
|
||||
assert.NotEmpty(t, startCommitID)
|
||||
assert.EqualValues(t, "9c9aef8dd84e02bc7ec12641deb4c930a7c30185", startCommitID)
|
||||
assert.EqualValues(t, "95bb4d39648ee7e325106df01a621c530863a653", startCommitID)
|
||||
}
|
||||
|
@ -64,7 +64,10 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
|
||||
} else if commit.ParentCount() == 0 {
|
||||
cmd.AddArguments("show").AddDynamicArguments(endCommit).AddDashesAndList(files...)
|
||||
} else {
|
||||
c, _ := commit.Parent(0)
|
||||
c, err := commit.Parent(0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
cmd.AddArguments("diff", "-M").AddDynamicArguments(c.ID.String(), endCommit).AddDashesAndList(files...)
|
||||
}
|
||||
case RawDiffPatch:
|
||||
@ -74,7 +77,10 @@ func GetRepoRawDiffForFile(repo *Repository, startCommit, endCommit string, diff
|
||||
} else if commit.ParentCount() == 0 {
|
||||
cmd.AddArguments("format-patch", "--no-signature", "--stdout", "--root").AddDynamicArguments(endCommit).AddDashesAndList(files...)
|
||||
} else {
|
||||
c, _ := commit.Parent(0)
|
||||
c, err := commit.Parent(0)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
query := fmt.Sprintf("%s...%s", endCommit, c.ID.String())
|
||||
cmd.AddArguments("format-patch", "--no-signature", "--stdout").AddDynamicArguments(query).AddDashesAndList(files...)
|
||||
}
|
||||
|
@ -57,7 +57,7 @@ func (repo *Repository) IsBranchExist(name string) bool {
|
||||
|
||||
// GetBranches returns branches from the repository, skipping "skip" initial branches and
|
||||
// returning at most "limit" branches, or all branches if "limit" is 0.
|
||||
// Branches are returned with sort of `-commiterdate` as the nogogit
|
||||
// Branches are returned with sort of `-committerdate` as the nogogit
|
||||
// implementation. This requires full fetch, sort and then the
|
||||
// skip/limit applies later as gogit returns in undefined order.
|
||||
func (repo *Repository) GetBranchNames(skip, limit int) ([]string, int, error) {
|
||||
|
@ -519,6 +519,7 @@ func (repo *Repository) AddLastCommitCache(cacheKey, fullName, sha string) error
|
||||
return nil
|
||||
}
|
||||
|
||||
// GetCommitBranchStart returns the commit where the branch diverged
|
||||
func (repo *Repository) GetCommitBranchStart(env []string, branch, endCommitID string) (string, error) {
|
||||
cmd := NewCommand(repo.Ctx, "log", prettyLogFormat)
|
||||
cmd.AddDynamicArguments(endCommitID)
|
||||
@ -533,7 +534,8 @@ func (repo *Repository) GetCommitBranchStart(env []string, branch, endCommitID s
|
||||
|
||||
parts := bytes.Split(bytes.TrimSpace(stdout), []byte{'\n'})
|
||||
|
||||
var startCommitID string
|
||||
// check the commits one by one until we find a commit contained by another branch
|
||||
// and we think this commit is the divergence point
|
||||
for _, commitID := range parts {
|
||||
branches, err := repo.getBranches(env, string(commitID), 2)
|
||||
if err != nil {
|
||||
@ -541,11 +543,9 @@ func (repo *Repository) GetCommitBranchStart(env []string, branch, endCommitID s
|
||||
}
|
||||
for _, b := range branches {
|
||||
if b != branch {
|
||||
return startCommitID, nil
|
||||
return string(commitID), nil
|
||||
}
|
||||
}
|
||||
|
||||
startCommitID = string(commitID)
|
||||
}
|
||||
|
||||
return "", nil
|
||||
|
32
modules/gtprof/event.go
Normal file
@ -0,0 +1,32 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gtprof
|
||||
|
||||
type EventConfig struct {
|
||||
attributes []*TraceAttribute
|
||||
}
|
||||
|
||||
type EventOption interface {
|
||||
applyEvent(*EventConfig)
|
||||
}
|
||||
|
||||
type applyEventFunc func(*EventConfig)
|
||||
|
||||
func (f applyEventFunc) applyEvent(cfg *EventConfig) {
|
||||
f(cfg)
|
||||
}
|
||||
|
||||
func WithAttributes(attrs ...*TraceAttribute) EventOption {
|
||||
return applyEventFunc(func(cfg *EventConfig) {
|
||||
cfg.attributes = append(cfg.attributes, attrs...)
|
||||
})
|
||||
}
|
||||
|
||||
func eventConfigFromOptions(options ...EventOption) *EventConfig {
|
||||
cfg := &EventConfig{}
|
||||
for _, opt := range options {
|
||||
opt.applyEvent(cfg)
|
||||
}
|
||||
return cfg
|
||||
}
|
175
modules/gtprof/trace.go
Normal file
@ -0,0 +1,175 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gtprof
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"sync"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/util"
|
||||
)
|
||||
|
||||
type contextKey struct {
|
||||
name string
|
||||
}
|
||||
|
||||
var contextKeySpan = &contextKey{"span"}
|
||||
|
||||
type traceStarter interface {
|
||||
start(ctx context.Context, traceSpan *TraceSpan, internalSpanIdx int) (context.Context, traceSpanInternal)
|
||||
}
|
||||
|
||||
type traceSpanInternal interface {
|
||||
addEvent(name string, cfg *EventConfig)
|
||||
recordError(err error, cfg *EventConfig)
|
||||
end()
|
||||
}
|
||||
|
||||
type TraceSpan struct {
|
||||
// immutable
|
||||
parent *TraceSpan
|
||||
internalSpans []traceSpanInternal
|
||||
internalContexts []context.Context
|
||||
|
||||
// mutable, must be protected by mutex
|
||||
mu sync.RWMutex
|
||||
name string
|
||||
statusCode uint32
|
||||
statusDesc string
|
||||
startTime time.Time
|
||||
endTime time.Time
|
||||
attributes []*TraceAttribute
|
||||
children []*TraceSpan
|
||||
}
|
||||
|
||||
type TraceAttribute struct {
|
||||
Key string
|
||||
Value TraceValue
|
||||
}
|
||||
|
||||
type TraceValue struct {
|
||||
v any
|
||||
}
|
||||
|
||||
func (t *TraceValue) AsString() string {
|
||||
return fmt.Sprint(t.v)
|
||||
}
|
||||
|
||||
func (t *TraceValue) AsInt64() int64 {
|
||||
v, _ := util.ToInt64(t.v)
|
||||
return v
|
||||
}
|
||||
|
||||
func (t *TraceValue) AsFloat64() float64 {
|
||||
v, _ := util.ToFloat64(t.v)
|
||||
return v
|
||||
}
|
||||
|
||||
var globalTraceStarters []traceStarter
|
||||
|
||||
type Tracer struct {
|
||||
starters []traceStarter
|
||||
}
|
||||
|
||||
func (s *TraceSpan) SetName(name string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.name = name
|
||||
}
|
||||
|
||||
func (s *TraceSpan) SetStatus(code uint32, desc string) {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
s.statusCode, s.statusDesc = code, desc
|
||||
}
|
||||
|
||||
func (s *TraceSpan) AddEvent(name string, options ...EventOption) {
|
||||
cfg := eventConfigFromOptions(options...)
|
||||
for _, tsp := range s.internalSpans {
|
||||
tsp.addEvent(name, cfg)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *TraceSpan) RecordError(err error, options ...EventOption) {
|
||||
cfg := eventConfigFromOptions(options...)
|
||||
for _, tsp := range s.internalSpans {
|
||||
tsp.recordError(err, cfg)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *TraceSpan) SetAttributeString(key, value string) *TraceSpan {
|
||||
s.mu.Lock()
|
||||
defer s.mu.Unlock()
|
||||
|
||||
s.attributes = append(s.attributes, &TraceAttribute{Key: key, Value: TraceValue{v: value}})
|
||||
return s
|
||||
}
|
||||
|
||||
func (t *Tracer) Start(ctx context.Context, spanName string) (context.Context, *TraceSpan) {
|
||||
starters := t.starters
|
||||
if starters == nil {
|
||||
starters = globalTraceStarters
|
||||
}
|
||||
ts := &TraceSpan{name: spanName, startTime: time.Now()}
|
||||
parentSpan := GetContextSpan(ctx)
|
||||
if parentSpan != nil {
|
||||
parentSpan.mu.Lock()
|
||||
parentSpan.children = append(parentSpan.children, ts)
|
||||
parentSpan.mu.Unlock()
|
||||
ts.parent = parentSpan
|
||||
}
|
||||
|
||||
parentCtx := ctx
|
||||
for internalSpanIdx, tsp := range starters {
|
||||
var internalSpan traceSpanInternal
|
||||
if parentSpan != nil {
|
||||
parentCtx = parentSpan.internalContexts[internalSpanIdx]
|
||||
}
|
||||
ctx, internalSpan = tsp.start(parentCtx, ts, internalSpanIdx)
|
||||
ts.internalContexts = append(ts.internalContexts, ctx)
|
||||
ts.internalSpans = append(ts.internalSpans, internalSpan)
|
||||
}
|
||||
ctx = context.WithValue(ctx, contextKeySpan, ts)
|
||||
return ctx, ts
|
||||
}
|
||||
|
||||
type mutableContext interface {
|
||||
context.Context
|
||||
SetContextValue(key, value any)
|
||||
GetContextValue(key any) any
|
||||
}
|
||||
|
||||
// StartInContext starts a trace span in Gitea's mutable context (usually the web request context).
|
||||
// Due to the design limitation of Gitea's web framework, it can't use `context.WithValue` to bind a new span into a new context.
|
||||
// So here we use our "reqctx" framework to achieve the same result: web request context could always see the latest "span".
|
||||
func (t *Tracer) StartInContext(ctx mutableContext, spanName string) (*TraceSpan, func()) {
|
||||
curTraceSpan := GetContextSpan(ctx)
|
||||
_, newTraceSpan := GetTracer().Start(ctx, spanName)
|
||||
ctx.SetContextValue(contextKeySpan, newTraceSpan)
|
||||
return newTraceSpan, func() {
|
||||
newTraceSpan.End()
|
||||
ctx.SetContextValue(contextKeySpan, curTraceSpan)
|
||||
}
|
||||
}
|
||||
|
||||
func (s *TraceSpan) End() {
|
||||
s.mu.Lock()
|
||||
s.endTime = time.Now()
|
||||
s.mu.Unlock()
|
||||
|
||||
for _, tsp := range s.internalSpans {
|
||||
tsp.end()
|
||||
}
|
||||
}
|
||||
|
||||
func GetTracer() *Tracer {
|
||||
return &Tracer{}
|
||||
}
|
||||
|
||||
func GetContextSpan(ctx context.Context) *TraceSpan {
|
||||
ts, _ := ctx.Value(contextKeySpan).(*TraceSpan)
|
||||
return ts
|
||||
}
|
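modules/git above wires its command runs into the new gtprof tracer; a compact usage sketch with illustrative span names:

package example

import (
	"context"

	"code.gitea.io/gitea/modules/gtprof"
)

func doWork(ctx context.Context) {
	// Start a span; if ctx already carries a span, the new one becomes its child.
	ctx, span := gtprof.GetTracer().Start(ctx, "example-work")
	defer span.End()

	span.SetAttributeString(gtprof.TraceAttrFuncCaller, "doWork")

	step(ctx) // nested spans started from ctx hang off "example-work"
}

func step(ctx context.Context) {
	_, span := gtprof.GetTracer().Start(ctx, "example-step")
	defer span.End()
}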
96
modules/gtprof/trace_builtin.go
Normal file
@ -0,0 +1,96 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gtprof
|
||||
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
"sync/atomic"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/tailmsg"
|
||||
)
|
||||
|
||||
type traceBuiltinStarter struct{}
|
||||
|
||||
type traceBuiltinSpan struct {
|
||||
ts *TraceSpan
|
||||
|
||||
internalSpanIdx int
|
||||
}
|
||||
|
||||
func (t *traceBuiltinSpan) addEvent(name string, cfg *EventConfig) {
|
||||
// No-op because builtin tracer doesn't need it.
|
||||
// In the future we might use it to mark the time point between backend logic and network response.
|
||||
}
|
||||
|
||||
func (t *traceBuiltinSpan) recordError(err error, cfg *EventConfig) {
|
||||
// No-op because builtin tracer doesn't need it.
|
||||
// Actually Gitea doesn't handle err this way in most cases
|
||||
}
|
||||
|
||||
func (t *traceBuiltinSpan) toString(out *strings.Builder, indent int) {
|
||||
t.ts.mu.RLock()
|
||||
defer t.ts.mu.RUnlock()
|
||||
|
||||
out.WriteString(strings.Repeat(" ", indent))
|
||||
out.WriteString(t.ts.name)
|
||||
if t.ts.endTime.IsZero() {
|
||||
out.WriteString(" duration: (not ended)")
|
||||
} else {
|
||||
out.WriteString(fmt.Sprintf(" duration=%.4fs", t.ts.endTime.Sub(t.ts.startTime).Seconds()))
|
||||
}
|
||||
for _, a := range t.ts.attributes {
|
||||
out.WriteString(" ")
|
||||
out.WriteString(a.Key)
|
||||
out.WriteString("=")
|
||||
value := a.Value.AsString()
|
||||
if strings.ContainsAny(value, " \t\r\n") {
|
||||
quoted := false
|
||||
for _, c := range "\"'`" {
|
||||
if quoted = !strings.Contains(value, string(c)); quoted {
|
||||
value = string(c) + value + string(c)
|
||||
break
|
||||
}
|
||||
}
|
||||
if !quoted {
|
||||
value = fmt.Sprintf("%q", value)
|
||||
}
|
||||
}
|
||||
out.WriteString(value)
|
||||
}
|
||||
out.WriteString("\n")
|
||||
for _, c := range t.ts.children {
|
||||
span := c.internalSpans[t.internalSpanIdx].(*traceBuiltinSpan)
|
||||
span.toString(out, indent+2)
|
||||
}
|
||||
}
|
||||
|
||||
func (t *traceBuiltinSpan) end() {
|
||||
if t.ts.parent == nil {
|
||||
// TODO: debug purpose only
|
||||
// TODO: it should distinguish between http response network lag and actual processing time
|
||||
threshold := time.Duration(traceBuiltinThreshold.Load())
|
||||
if threshold != 0 && t.ts.endTime.Sub(t.ts.startTime) > threshold {
|
||||
sb := &strings.Builder{}
|
||||
t.toString(sb, 0)
|
||||
tailmsg.GetManager().GetTraceRecorder().Record(sb.String())
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
func (t *traceBuiltinStarter) start(ctx context.Context, traceSpan *TraceSpan, internalSpanIdx int) (context.Context, traceSpanInternal) {
|
||||
return ctx, &traceBuiltinSpan{ts: traceSpan, internalSpanIdx: internalSpanIdx}
|
||||
}
|
||||
|
||||
func init() {
|
||||
globalTraceStarters = append(globalTraceStarters, &traceBuiltinStarter{})
|
||||
}
|
||||
|
||||
var traceBuiltinThreshold atomic.Int64
|
||||
|
||||
func EnableBuiltinTracer(threshold time.Duration) {
|
||||
traceBuiltinThreshold.Store(int64(threshold))
|
||||
}
|
19
modules/gtprof/trace_const.go
Normal file
@ -0,0 +1,19 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gtprof
|
||||
|
||||
// Some interesting names could be found in https://github.com/open-telemetry/opentelemetry-go/tree/main/semconv
|
||||
|
||||
const (
|
||||
TraceSpanHTTP = "http"
|
||||
TraceSpanGitRun = "git-run"
|
||||
TraceSpanDatabase = "database"
|
||||
)
|
||||
|
||||
const (
|
||||
TraceAttrFuncCaller = "func.caller"
|
||||
TraceAttrDbSQL = "db.sql"
|
||||
TraceAttrGitCommand = "git.command"
|
||||
TraceAttrHTTPRoute = "http.route"
|
||||
)
|
93
modules/gtprof/trace_test.go
Normal file
@ -0,0 +1,93 @@
|
||||
// Copyright 2025 The Gitea Authors. All rights reserved.
|
||||
// SPDX-License-Identifier: MIT
|
||||
|
||||
package gtprof
|
||||
|
||||
import (
|
||||
"context"
|
||||
"testing"
|
||||
|
||||
"github.com/stretchr/testify/assert"
|
||||
)
|
||||
|
||||
// "vendor span" is a simple demo for a span from a vendor library
|
||||
|
||||
var vendorContextKey any = "vendorContextKey"
|
||||
|
||||
type vendorSpan struct {
|
||||
name string
|
||||
children []*vendorSpan
|
||||
}
|
||||
|
||||
func vendorTraceStart(ctx context.Context, name string) (context.Context, *vendorSpan) {
|
||||
span := &vendorSpan{name: name}
|
||||
parentSpan, ok := ctx.Value(vendorContextKey).(*vendorSpan)
|
||||
if ok {
|
||||
parentSpan.children = append(parentSpan.children, span)
|
||||
}
|
||||
ctx = context.WithValue(ctx, vendorContextKey, span)
|
||||
return ctx, span
|
||||
}
|
||||
|
||||
// below "testTrace*" integrate the vendor span into our trace system
|
||||
|
||||
type testTraceSpan struct {
|
||||
vendorSpan *vendorSpan
|
||||
}
|
||||
|
||||
func (t *testTraceSpan) addEvent(name string, cfg *EventConfig) {}
|
||||
|
||||
func (t *testTraceSpan) recordError(err error, cfg *EventConfig) {}
|
||||
|
||||
func (t *testTraceSpan) end() {}
|
||||
|
||||
type testTraceStarter struct{}
|
||||
|
||||
func (t *testTraceStarter) start(ctx context.Context, traceSpan *TraceSpan, internalSpanIdx int) (context.Context, traceSpanInternal) {
|
||||
ctx, span := vendorTraceStart(ctx, traceSpan.name)
|
||||
return ctx, &testTraceSpan{span}
|
||||
}
|
||||
|
||||
func TestTraceStarter(t *testing.T) {
|
||||
globalTraceStarters = []traceStarter{&testTraceStarter{}}
|
||||
|
||||
ctx := context.Background()
|
||||
ctx, span := GetTracer().Start(ctx, "root")
|
||||
defer span.End()
|
||||
|
||||
func(ctx context.Context) {
|
||||
ctx, span := GetTracer().Start(ctx, "span1")
|
||||
defer span.End()
|
||||
func(ctx context.Context) {
|
||||
_, span := GetTracer().Start(ctx, "spanA")
|
||||
defer span.End()
|
||||
}(ctx)
|
||||
func(ctx context.Context) {
|
||||
_, span := GetTracer().Start(ctx, "spanB")
|
||||
defer span.End()
|
||||
}(ctx)
|
||||
}(ctx)
|
||||
|
||||
func(ctx context.Context) {
|
||||
_, span := GetTracer().Start(ctx, "span2")
|
||||
defer span.End()
|
||||
}(ctx)
|
||||
|
||||
var spanFullNames []string
|
||||
var collectSpanNames func(parentFullName string, s *vendorSpan)
|
||||
collectSpanNames = func(parentFullName string, s *vendorSpan) {
|
||||
fullName := parentFullName + "/" + s.name
|
||||
spanFullNames = append(spanFullNames, fullName)
|
||||
for _, c := range s.children {
|
||||
collectSpanNames(fullName, c)
|
||||
}
|
||||
}
|
||||
collectSpanNames("", span.internalSpans[0].(*testTraceSpan).vendorSpan)
|
||||
assert.Equal(t, []string{
|
||||
"/root",
|
||||
"/root/span1",
|
||||
"/root/span1/spanA",
|
||||
"/root/span1/spanB",
|
||||
"/root/span2",
|
||||
}, spanFullNames)
|
||||
}
|
@ -99,10 +99,10 @@ func (r *Request) Param(key, value string) *Request {
|
||||
return r
|
||||
}
|
||||
|
||||
// Body adds request raw body.
|
||||
// it supports string and []byte.
|
||||
// Body adds request raw body. It supports string, []byte and io.Reader as body.
|
||||
func (r *Request) Body(data any) *Request {
|
||||
switch t := data.(type) {
|
||||
case nil: // do nothing
|
||||
case string:
|
||||
bf := bytes.NewBufferString(t)
|
||||
r.req.Body = io.NopCloser(bf)
|
||||
@ -111,6 +111,12 @@ func (r *Request) Body(data any) *Request {
|
||||
bf := bytes.NewBuffer(t)
|
||||
r.req.Body = io.NopCloser(bf)
|
||||
r.req.ContentLength = int64(len(t))
|
||||
case io.ReadCloser:
|
||||
r.req.Body = t
|
||||
case io.Reader:
|
||||
r.req.Body = io.NopCloser(t)
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported request body type %T", t))
|
||||
}
|
||||
return r
|
||||
}
|
||||
@ -141,7 +147,7 @@ func (r *Request) getResponse() (*http.Response, error) {
|
||||
}
|
||||
} else if r.req.Method == "POST" && r.req.Body == nil && len(paramBody) > 0 {
|
||||
r.Header("Content-Type", "application/x-www-form-urlencoded")
|
||||
r.Body(paramBody)
|
||||
r.Body(paramBody) // string
|
||||
}
|
||||
|
||||
var err error
|
||||
@ -185,6 +191,7 @@ func (r *Request) getResponse() (*http.Response, error) {
|
||||
}
|
||||
|
||||
// Response executes request client gets response manually.
|
||||
// Caller MUST close the response body if no error occurs
|
||||
func (r *Request) Response() (*http.Response, error) {
|
||||
return r.getResponse()
|
||||
}
|
||||
|
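Body now also accepts io.Reader and io.ReadCloser, which is what the LFS upload path below uses to stream data instead of buffering it. A hedged sketch; httplib.NewRequest is assumed to be the existing constructor and the URL is a placeholder:

package example

import (
	"net/http"
	"os"

	"code.gitea.io/gitea/modules/httplib"
)

// uploadFile streams a local file as the PUT body; passing the *os.File hits
// the io.ReadCloser case added above, so nothing is read into memory up front.
func uploadFile(url, path string) error {
	f, err := os.Open(path)
	if err != nil {
		return err
	}
	defer f.Close()

	resp, err := httplib.NewRequest(url, http.MethodPut).Body(f).Response()
	if err != nil {
		return err
	}
	defer resp.Body.Close() // caller must close, per the Response doc comment
	return nil
}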
@ -72,10 +72,14 @@ func (c *HTTPClient) batch(ctx context.Context, operation string, objects []Poin
|
||||
|
||||
url := fmt.Sprintf("%s/objects/batch", c.endpoint)
|
||||
|
||||
// Original: In some lfs server implementations, they require the ref attribute. #32838
|
||||
// `ref` is an "optional object describing the server ref that the objects belong to"
|
||||
// but some (incorrect) lfs servers require it, so maybe adding an empty ref here doesn't break the correct ones.
|
||||
// but some (incorrect) lfs servers like aliyun require it, so maybe adding an empty ref here doesn't break the correct ones.
|
||||
// https://github.com/git-lfs/git-lfs/blob/a32a02b44bf8a511aa14f047627c49e1a7fd5021/docs/api/batch.md?plain=1#L37
|
||||
request := &BatchRequest{operation, c.transferNames(), &Reference{}, objects}
|
||||
//
|
||||
// UPDATE: it can't use "empty ref" here because it breaks others like https://github.com/go-gitea/gitea/issues/33453
|
||||
request := &BatchRequest{operation, c.transferNames(), nil, objects}
|
||||
|
||||
payload := new(bytes.Buffer)
|
||||
err := json.NewEncoder(payload).Encode(request)
|
||||
if err != nil {
|
||||
|
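For the ref discussion above: with a nil *Reference the field is dropped from the batch payload entirely, rather than being sent as an empty object. A small illustration using a local stand-in struct; the JSON field names follow the git-lfs batch API and the omitempty behaviour of Gitea's actual struct is an assumption:

package main

import (
	"encoding/json"
	"fmt"
)

type ref struct {
	Name string `json:"name"`
}

type batchRequest struct {
	Operation string   `json:"operation"`
	Transfers []string `json:"transfers,omitempty"`
	Ref       *ref     `json:"ref,omitempty"` // nil => "ref" is omitted
	Objects   []any    `json:"objects"`
}

func main() {
	b, _ := json.Marshal(batchRequest{Operation: "download", Objects: []any{}})
	fmt.Println(string(b)) // {"operation":"download","objects":[]}
}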
@ -4,7 +4,6 @@
|
||||
package backend
|
||||
|
||||
import (
|
||||
"bytes"
|
||||
"context"
|
||||
"encoding/base64"
|
||||
"fmt"
|
||||
@ -29,7 +28,7 @@ var Capabilities = []string{
|
||||
"locking",
|
||||
}
|
||||
|
||||
var _ transfer.Backend = &GiteaBackend{}
|
||||
var _ transfer.Backend = (*GiteaBackend)(nil)
|
||||
|
||||
// GiteaBackend is an adapter between git-lfs-transfer library and Gitea's internal LFS API
|
||||
type GiteaBackend struct {
|
||||
@ -78,17 +77,17 @@ func (g *GiteaBackend) Batch(_ string, pointers []transfer.BatchItem, args trans
headerAccept: mimeGitLFS,
headerContentType: mimeGitLFS,
}
req := newInternalRequest(g.ctx, url, http.MethodPost, headers, bodyBytes)
req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
resp, err := req.Response()
if err != nil {
g.logger.Log("http request error", err)
return nil, err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
g.logger.Log("http statuscode error", resp.StatusCode, statusCodeToErr(resp.StatusCode))
return nil, statusCodeToErr(resp.StatusCode)
}
defer resp.Body.Close()
respBytes, err := io.ReadAll(resp.Body)
if err != nil {
g.logger.Log("http read error", err)
@ -158,8 +157,7 @@ func (g *GiteaBackend) Batch(_ string, pointers []transfer.BatchItem, args trans
return pointers, nil
}
// Download implements transfer.Backend. The returned reader must be closed by the
// caller.
// Download implements transfer.Backend. The returned reader must be closed by the caller.
func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser, int64, error) {
idMapStr, exists := args[argID]
if !exists {
@ -187,25 +185,25 @@ func (g *GiteaBackend) Download(oid string, args transfer.Args) (io.ReadCloser,
headerGiteaInternalAuth: g.internalAuth,
headerAccept: mimeOctetStream,
}
req := newInternalRequest(g.ctx, url, http.MethodGet, headers, nil)
req := newInternalRequestLFS(g.ctx, url, http.MethodGet, headers, nil)
resp, err := req.Response()
if err != nil {
return nil, 0, err
return nil, 0, fmt.Errorf("failed to get response: %w", err)
}
// no need to close the body here by "defer resp.Body.Close()", see below
if resp.StatusCode != http.StatusOK {
return nil, 0, statusCodeToErr(resp.StatusCode)
}
defer resp.Body.Close()
respBytes, err := io.ReadAll(resp.Body)

respSize, err := strconv.ParseInt(resp.Header.Get("X-Gitea-LFS-Content-Length"), 10, 64)
if err != nil {
return nil, 0, err
return nil, 0, fmt.Errorf("failed to parse content length: %w", err)
}
respSize := int64(len(respBytes))
respBuf := io.NopCloser(bytes.NewBuffer(respBytes))
return respBuf, respSize, nil
// transfer.Backend will check io.Closer interface and close this Body reader
return resp.Body, respSize, nil
}
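The old code read the whole object into memory and wrapped it in a NopCloser; the new code hands the response body back as a stream and takes the size from the X-Gitea-LFS-Content-Length header. A standalone sketch of that streaming shape (the URL is a placeholder and the error texts are illustrative, not Gitea's exact messages):

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strconv"
)

// download returns the body as a stream plus its declared size, instead of
// buffering the whole object; the caller is responsible for closing it.
func download(url string) (io.ReadCloser, int64, error) {
	resp, err := http.Get(url)
	if err != nil {
		return nil, 0, fmt.Errorf("failed to get response: %w", err)
	}
	if resp.StatusCode != http.StatusOK {
		resp.Body.Close()
		return nil, 0, fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	size, err := strconv.ParseInt(resp.Header.Get("X-Gitea-LFS-Content-Length"), 10, 64)
	if err != nil {
		resp.Body.Close()
		return nil, 0, fmt.Errorf("failed to parse content length: %w", err)
	}
	return resp.Body, size, nil // caller closes the reader
}

func main() {
	body, size, err := download("http://127.0.0.1:3000/api/internal/lfs/example")
	if err != nil {
		fmt.Fprintln(os.Stderr, err)
		return
	}
	defer body.Close()
	n, _ := io.Copy(io.Discard, body)
	fmt.Println(size, n)
}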
// StartUpload implements transfer.Backend.
// Upload implements transfer.Backend.
func (g *GiteaBackend) Upload(oid string, size int64, r io.Reader, args transfer.Args) error {
idMapStr, exists := args[argID]
if !exists {
@ -234,15 +232,14 @@ func (g *GiteaBackend) Upload(oid string, size int64, r io.Reader, args transfer
headerContentType: mimeOctetStream,
headerContentLength: strconv.FormatInt(size, 10),
}
reqBytes, err := io.ReadAll(r)
if err != nil {
return err
}
req := newInternalRequest(g.ctx, url, http.MethodPut, headers, reqBytes)

req := newInternalRequestLFS(g.ctx, url, http.MethodPut, headers, nil)
req.Body(r)
resp, err := req.Response()
if err != nil {
return err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return statusCodeToErr(resp.StatusCode)
}
@ -284,11 +281,12 @@ func (g *GiteaBackend) Verify(oid string, size int64, args transfer.Args) (trans
headerAccept: mimeGitLFS,
headerContentType: mimeGitLFS,
}
req := newInternalRequest(g.ctx, url, http.MethodPost, headers, bodyBytes)
req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
resp, err := req.Response()
if err != nil {
return transfer.NewStatus(transfer.StatusInternalServerError), err
}
defer resp.Body.Close()
if resp.StatusCode != http.StatusOK {
return transfer.NewStatus(uint32(resp.StatusCode), http.StatusText(resp.StatusCode)), statusCodeToErr(resp.StatusCode)
}
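The upload side gets the mirror-image change: instead of io.ReadAll(r) buffering the object, the reader is handed straight to the request while Content-Length is still set from the known size. A standalone sketch of a streamed PUT with the standard library (the URL and payload are placeholders):

package main

import (
	"fmt"
	"io"
	"net/http"
	"os"
	"strings"
)

// upload streams r to the server instead of buffering it with io.ReadAll;
// the size is known up front, so Content-Length can still be set explicitly.
func upload(url string, size int64, r io.Reader) error {
	req, err := http.NewRequest(http.MethodPut, url, r)
	if err != nil {
		return err
	}
	req.ContentLength = size
	req.Header.Set("Content-Type", "application/octet-stream")
	resp, err := http.DefaultClient.Do(req)
	if err != nil {
		return err
	}
	defer resp.Body.Close()
	if resp.StatusCode != http.StatusOK {
		return fmt.Errorf("unexpected status %d", resp.StatusCode)
	}
	return nil
}

func main() {
	data := "large object contents"
	if err := upload("http://127.0.0.1:3000/example", int64(len(data)), strings.NewReader(data)); err != nil {
		fmt.Fprintln(os.Stderr, err)
	}
}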
@ -50,7 +50,7 @@ func (g *giteaLockBackend) Create(path, refname string) (transfer.Lock, error) {
|
||||
headerAccept: mimeGitLFS,
|
||||
headerContentType: mimeGitLFS,
|
||||
}
|
||||
req := newInternalRequest(g.ctx, url, http.MethodPost, headers, bodyBytes)
|
||||
req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
|
||||
resp, err := req.Response()
|
||||
if err != nil {
|
||||
g.logger.Log("http request error", err)
|
||||
@ -102,7 +102,7 @@ func (g *giteaLockBackend) Unlock(lock transfer.Lock) error {
|
||||
headerAccept: mimeGitLFS,
|
||||
headerContentType: mimeGitLFS,
|
||||
}
|
||||
req := newInternalRequest(g.ctx, url, http.MethodPost, headers, bodyBytes)
|
||||
req := newInternalRequestLFS(g.ctx, url, http.MethodPost, headers, bodyBytes)
|
||||
resp, err := req.Response()
|
||||
if err != nil {
|
||||
g.logger.Log("http request error", err)
|
||||
@ -185,7 +185,7 @@ func (g *giteaLockBackend) queryLocks(v url.Values) ([]transfer.Lock, string, er
|
||||
headerAccept: mimeGitLFS,
|
||||
headerContentType: mimeGitLFS,
|
||||
}
|
||||
req := newInternalRequest(g.ctx, url, http.MethodGet, headers, nil)
|
||||
req := newInternalRequestLFS(g.ctx, url, http.MethodGet, headers, nil)
|
||||
resp, err := req.Response()
|
||||
if err != nil {
|
||||
g.logger.Log("http request error", err)
|
||||
|
@ -5,15 +5,12 @@ package backend
|
||||
|
||||
import (
|
||||
"context"
|
||||
"crypto/tls"
|
||||
"fmt"
|
||||
"net"
|
||||
"io"
|
||||
"net/http"
|
||||
"time"
|
||||
|
||||
"code.gitea.io/gitea/modules/httplib"
|
||||
"code.gitea.io/gitea/modules/proxyprotocol"
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
"code.gitea.io/gitea/modules/private"
|
||||
|
||||
"github.com/charmbracelet/git-lfs-transfer/transfer"
|
||||
)
|
||||
@ -89,53 +86,19 @@ func statusCodeToErr(code int) error {
|
||||
}
|
||||
}
|
||||
|
||||
func newInternalRequest(ctx context.Context, url, method string, headers map[string]string, body []byte) *httplib.Request {
|
||||
req := httplib.NewRequest(url, method).
|
||||
SetContext(ctx).
|
||||
SetTimeout(10*time.Second, 60*time.Second).
|
||||
SetTLSClientConfig(&tls.Config{
|
||||
InsecureSkipVerify: true,
|
||||
})
|
||||
|
||||
if setting.Protocol == setting.HTTPUnix {
|
||||
req.SetTransport(&http.Transport{
|
||||
DialContext: func(ctx context.Context, _, _ string) (net.Conn, error) {
|
||||
var d net.Dialer
|
||||
conn, err := d.DialContext(ctx, "unix", setting.HTTPAddr)
|
||||
if err != nil {
|
||||
return conn, err
|
||||
}
|
||||
if setting.LocalUseProxyProtocol {
|
||||
if err = proxyprotocol.WriteLocalHeader(conn); err != nil {
|
||||
_ = conn.Close()
|
||||
return nil, err
|
||||
}
|
||||
}
|
||||
return conn, err
|
||||
},
|
||||
})
|
||||
} else if setting.LocalUseProxyProtocol {
|
||||
req.SetTransport(&http.Transport{
|
||||
DialContext: func(ctx context.Context, network, address string) (net.Conn, error) {
|
||||
var d net.Dialer
|
||||
conn, err := d.DialContext(ctx, network, address)
|
||||
if err != nil {
|
||||
return conn, err
|
||||
}
|
||||
if err = proxyprotocol.WriteLocalHeader(conn); err != nil {
|
||||
_ = conn.Close()
|
||||
return nil, err
|
||||
}
|
||||
return conn, err
|
||||
},
|
||||
})
|
||||
}
|
||||
|
||||
func newInternalRequestLFS(ctx context.Context, url, method string, headers map[string]string, body any) *httplib.Request {
|
||||
req := private.NewInternalRequest(ctx, url, method)
|
||||
for k, v := range headers {
|
||||
req.Header(k, v)
|
||||
}
|
||||
|
||||
req.Body(body)
|
||||
|
||||
switch body := body.(type) {
|
||||
case nil: // do nothing
|
||||
case []byte:
|
||||
req.Body(body) // []byte
|
||||
case io.Reader:
|
||||
req.Body(body) // io.Reader or io.ReadCloser
|
||||
default:
|
||||
panic(fmt.Sprintf("unsupported request body type %T", body))
|
||||
}
|
||||
return req
|
||||
}
|
||||
|
@ -47,7 +47,7 @@ var globalVars = sync.OnceValue(func() *globalVarsType {
// NOTE: All below regex matching do not perform any extra validation.
// Thus a link is produced even if the linked entity does not exist.
// While fast, this is also incorrect and lead to false positives.
// TODO: fix invalid linking issue
// TODO: fix invalid linking issue (update: stale TODO, what issues? maybe no TODO anymore)

// valid chars in encoded path and parameter: [-+~_%.a-zA-Z0-9/]
@ -17,7 +17,7 @@ type GenerateTokenRequest struct {
|
||||
func GenerateActionsRunnerToken(ctx context.Context, scope string) (*ResponseText, ResponseExtra) {
|
||||
reqURL := setting.LocalURL + "api/internal/actions/generate_actions_runner_token"
|
||||
|
||||
req := newInternalRequest(ctx, reqURL, "POST", GenerateTokenRequest{
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", GenerateTokenRequest{
|
||||
Scope: scope,
|
||||
})
|
||||
|
||||
|
@ -85,7 +85,7 @@ type HookProcReceiveRefResult struct {
|
||||
// HookPreReceive check whether the provided commits are allowed
|
||||
func HookPreReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) ResponseExtra {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/pre-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
|
||||
req := newInternalRequest(ctx, reqURL, "POST", opts)
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
|
||||
req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
|
||||
_, extra := requestJSONResp(req, &ResponseText{})
|
||||
return extra
|
||||
@ -94,7 +94,7 @@ func HookPreReceive(ctx context.Context, ownerName, repoName string, opts HookOp
|
||||
// HookPostReceive updates services and users
|
||||
func HookPostReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (*HookPostReceiveResult, ResponseExtra) {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/post-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
|
||||
req := newInternalRequest(ctx, reqURL, "POST", opts)
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
|
||||
req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
|
||||
return requestJSONResp(req, &HookPostReceiveResult{})
|
||||
}
|
||||
@ -103,7 +103,7 @@ func HookPostReceive(ctx context.Context, ownerName, repoName string, opts HookO
|
||||
func HookProcReceive(ctx context.Context, ownerName, repoName string, opts HookOptions) (*HookProcReceiveResult, ResponseExtra) {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/hook/proc-receive/%s/%s", url.PathEscape(ownerName), url.PathEscape(repoName))
|
||||
|
||||
req := newInternalRequest(ctx, reqURL, "POST", opts)
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", opts)
|
||||
req.SetReadWriteTimeout(time.Duration(60+len(opts.OldCommitIDs)) * time.Second)
|
||||
return requestJSONResp(req, &HookProcReceiveResult{})
|
||||
}
|
||||
@ -115,7 +115,7 @@ func SetDefaultBranch(ctx context.Context, ownerName, repoName, branch string) R
|
||||
url.PathEscape(repoName),
|
||||
url.PathEscape(branch),
|
||||
)
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
_, extra := requestJSONResp(req, &ResponseText{})
|
||||
return extra
|
||||
}
|
||||
@ -123,7 +123,7 @@ func SetDefaultBranch(ctx context.Context, ownerName, repoName, branch string) R
|
||||
// SSHLog sends ssh error log response
|
||||
func SSHLog(ctx context.Context, isErr bool, msg string) error {
|
||||
reqURL := setting.LocalURL + "api/internal/ssh/log"
|
||||
req := newInternalRequest(ctx, reqURL, "POST", &SSHLogOption{IsError: isErr, Message: msg})
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", &SSHLogOption{IsError: isErr, Message: msg})
|
||||
_, extra := requestJSONResp(req, &ResponseText{})
|
||||
return extra.Error
|
||||
}
|
||||
|
@ -34,7 +34,7 @@ func getClientIP() string {
return strings.Fields(sshConnEnv)[0]
}

func newInternalRequest(ctx context.Context, url, method string, body ...any) *httplib.Request {
func NewInternalRequest(ctx context.Context, url, method string) *httplib.Request {
if setting.InternalToken == "" {
log.Fatal(`The INTERNAL_TOKEN setting is missing from the configuration file: %q.
Ensure you are running in the correct environment or set the correct configuration file with -c.`, setting.CustomConf)
@ -82,13 +82,17 @@ Ensure you are running in the correct environment or set the correct configurati
},
})
}
return req
}

func newInternalRequestAPI(ctx context.Context, url, method string, body ...any) *httplib.Request {
req := NewInternalRequest(ctx, url, method)
if len(body) == 1 {
req.Header("Content-Type", "application/json")
jsonBytes, _ := json.Marshal(body[0])
req.Body(jsonBytes)
} else if len(body) > 1 {
log.Fatal("Too many arguments for newInternalRequest")
log.Fatal("Too many arguments for newInternalRequestAPI")
}

req.SetTimeout(10*time.Second, 60*time.Second)
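The refactor splits the shared setup (NewInternalRequest) from the JSON-specific wrapper (newInternalRequestAPI), so the LFS code above can reuse the former without inheriting a JSON Content-Type. A reduced sketch of the same split over a plain *http.Request (the header name, token handling and URL are illustrative placeholders, not the private package's actual API):

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
	"io"
	"log"
	"net/http"
)

// newInternalRequest carries the shared setup: URL, method and an auth header.
func newInternalRequest(url, method, token string) *http.Request {
	req, err := http.NewRequest(method, url, nil)
	if err != nil {
		log.Fatal(err)
	}
	req.Header.Set("Authorization", "Bearer "+token) // placeholder for the internal auth header
	return req
}

// newInternalRequestAPI layers the JSON conventions on top: at most one body,
// marshaled and sent as application/json.
func newInternalRequestAPI(url, method, token string, body ...any) *http.Request {
	req := newInternalRequest(url, method, token)
	if len(body) == 1 {
		jsonBytes, _ := json.Marshal(body[0])
		req.Header.Set("Content-Type", "application/json")
		req.Body = io.NopCloser(bytes.NewReader(jsonBytes))
		req.ContentLength = int64(len(jsonBytes))
	} else if len(body) > 1 {
		log.Fatal("too many arguments")
	}
	return req
}

func main() {
	req := newInternalRequestAPI("http://127.0.0.1:3000/api/internal/ssh/log", http.MethodPost, "secret",
		map[string]string{"message": "hello"})
	fmt.Println(req.Method, req.Header.Get("Content-Type")) // POST application/json
}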
@ -14,7 +14,7 @@ import (
|
||||
func UpdatePublicKeyInRepo(ctx context.Context, keyID, repoID int64) error {
|
||||
// Ask for running deliver hook and test pull request tasks.
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/ssh/%d/update/%d", keyID, repoID)
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
_, extra := requestJSONResp(req, &ResponseText{})
|
||||
return extra.Error
|
||||
}
|
||||
@ -24,7 +24,7 @@ func UpdatePublicKeyInRepo(ctx context.Context, keyID, repoID int64) error {
|
||||
func AuthorizedPublicKeyByContent(ctx context.Context, content string) (*ResponseText, ResponseExtra) {
|
||||
// Ask for running deliver hook and test pull request tasks.
|
||||
reqURL := setting.LocalURL + "api/internal/ssh/authorized_keys"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
req.Param("content", content)
|
||||
return requestJSONResp(req, &ResponseText{})
|
||||
}
|
||||
|
@ -23,7 +23,7 @@ type Email struct {
|
||||
func SendEmail(ctx context.Context, subject, message string, to []string) (*ResponseText, ResponseExtra) {
|
||||
reqURL := setting.LocalURL + "api/internal/mail/send"
|
||||
|
||||
req := newInternalRequest(ctx, reqURL, "POST", Email{
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", Email{
|
||||
Subject: subject,
|
||||
Message: message,
|
||||
To: to,
|
||||
|
@ -18,21 +18,21 @@ import (
|
||||
// Shutdown calls the internal shutdown function
|
||||
func Shutdown(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/shutdown"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Shutting down")
|
||||
}
|
||||
|
||||
// Restart calls the internal restart function
|
||||
func Restart(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/restart"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Restarting")
|
||||
}
|
||||
|
||||
// ReloadTemplates calls the internal reload-templates function
|
||||
func ReloadTemplates(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/reload-templates"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Reloaded")
|
||||
}
|
||||
|
||||
@ -45,7 +45,7 @@ type FlushOptions struct {
|
||||
// FlushQueues calls the internal flush-queues function
|
||||
func FlushQueues(ctx context.Context, timeout time.Duration, nonBlocking bool) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/flush-queues"
|
||||
req := newInternalRequest(ctx, reqURL, "POST", FlushOptions{Timeout: timeout, NonBlocking: nonBlocking})
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", FlushOptions{Timeout: timeout, NonBlocking: nonBlocking})
|
||||
if timeout > 0 {
|
||||
req.SetReadWriteTimeout(timeout + 10*time.Second)
|
||||
}
|
||||
@ -55,28 +55,28 @@ func FlushQueues(ctx context.Context, timeout time.Duration, nonBlocking bool) R
|
||||
// PauseLogging pauses logging
|
||||
func PauseLogging(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/pause-logging"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Logging Paused")
|
||||
}
|
||||
|
||||
// ResumeLogging resumes logging
|
||||
func ResumeLogging(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/resume-logging"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Logging Restarted")
|
||||
}
|
||||
|
||||
// ReleaseReopenLogging releases and reopens logging files
|
||||
func ReleaseReopenLogging(ctx context.Context) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/release-and-reopen-logging"
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Logging Restarted")
|
||||
}
|
||||
|
||||
// SetLogSQL sets database logging
|
||||
func SetLogSQL(ctx context.Context, on bool) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/set-log-sql?on=" + strconv.FormatBool(on)
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Log SQL setting set")
|
||||
}
|
||||
|
||||
@ -91,7 +91,7 @@ type LoggerOptions struct {
|
||||
// AddLogger adds a logger
|
||||
func AddLogger(ctx context.Context, logger, writer, mode string, config map[string]any) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/manager/add-logger"
|
||||
req := newInternalRequest(ctx, reqURL, "POST", LoggerOptions{
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", LoggerOptions{
|
||||
Logger: logger,
|
||||
Writer: writer,
|
||||
Mode: mode,
|
||||
@ -103,7 +103,7 @@ func AddLogger(ctx context.Context, logger, writer, mode string, config map[stri
|
||||
// RemoveLogger removes a logger
|
||||
func RemoveLogger(ctx context.Context, logger, writer string) ResponseExtra {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/remove-logger/%s/%s", url.PathEscape(logger), url.PathEscape(writer))
|
||||
req := newInternalRequest(ctx, reqURL, "POST")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST")
|
||||
return requestJSONClientMsg(req, "Removed")
|
||||
}
|
||||
|
||||
@ -111,7 +111,7 @@ func RemoveLogger(ctx context.Context, logger, writer string) ResponseExtra {
|
||||
func Processes(ctx context.Context, out io.Writer, flat, noSystem, stacktraces, json bool, cancel string) ResponseExtra {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/manager/processes?flat=%t&no-system=%t&stacktraces=%t&json=%t&cancel-pid=%s", flat, noSystem, stacktraces, json, url.QueryEscape(cancel))
|
||||
|
||||
req := newInternalRequest(ctx, reqURL, "GET")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "GET")
|
||||
callback := func(resp *http.Response, extra *ResponseExtra) {
|
||||
_, extra.Error = io.Copy(out, resp.Body)
|
||||
}
|
||||
|
@ -24,7 +24,7 @@ type RestoreParams struct {
|
||||
func RestoreRepo(ctx context.Context, repoDir, ownerName, repoName string, units []string, validation bool) ResponseExtra {
|
||||
reqURL := setting.LocalURL + "api/internal/restore_repo"
|
||||
|
||||
req := newInternalRequest(ctx, reqURL, "POST", RestoreParams{
|
||||
req := newInternalRequestAPI(ctx, reqURL, "POST", RestoreParams{
|
||||
RepoDir: repoDir,
|
||||
OwnerName: ownerName,
|
||||
RepoName: repoName,
|
||||
|
@ -23,7 +23,7 @@ type KeyAndOwner struct {
|
||||
// ServNoCommand returns information about the provided key
|
||||
func ServNoCommand(ctx context.Context, keyID int64) (*asymkey_model.PublicKey, *user_model.User, error) {
|
||||
reqURL := setting.LocalURL + fmt.Sprintf("api/internal/serv/none/%d", keyID)
|
||||
req := newInternalRequest(ctx, reqURL, "GET")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "GET")
|
||||
keyAndOwner, extra := requestJSONResp(req, &KeyAndOwner{})
|
||||
if extra.HasError() {
|
||||
return nil, nil, extra.Error
|
||||
@ -58,6 +58,6 @@ func ServCommand(ctx context.Context, keyID int64, ownerName, repoName string, m
|
||||
reqURL += fmt.Sprintf("&verb=%s", url.QueryEscape(verb))
|
||||
}
|
||||
}
|
||||
req := newInternalRequest(ctx, reqURL, "GET")
|
||||
req := newInternalRequestAPI(ctx, reqURL, "GET")
|
||||
return requestJSONResp(req, &ServCommandResults{})
|
||||
}
|
||||
|
@ -6,7 +6,6 @@ package repository
|
||||
import (
|
||||
"context"
|
||||
"fmt"
|
||||
"strings"
|
||||
|
||||
"code.gitea.io/gitea/models/db"
|
||||
git_model "code.gitea.io/gitea/models/git"
|
||||
@ -52,9 +51,6 @@ func SyncRepoBranchesWithRepo(ctx context.Context, repo *repo_model.Repository,
|
||||
{
|
||||
branches, _, err := gitRepo.GetBranchNames(0, 0)
|
||||
if err != nil {
|
||||
if strings.Contains(err.Error(), "ref file is empty") {
|
||||
return 0, nil
|
||||
}
|
||||
return 0, err
|
||||
}
|
||||
log.Trace("SyncRepoBranches[%s]: branches[%d]: %v", repo.FullName(), len(branches), branches)
|
||||
|
@ -46,6 +46,7 @@ var Service = struct {
|
||||
RequireSignInView bool
|
||||
EnableNotifyMail bool
|
||||
EnableBasicAuth bool
|
||||
EnablePasskeyAuth bool
|
||||
EnableReverseProxyAuth bool
|
||||
EnableReverseProxyAuthAPI bool
|
||||
EnableReverseProxyAutoRegister bool
|
||||
@ -161,6 +162,7 @@ func loadServiceFrom(rootCfg ConfigProvider) {
|
||||
Service.RequireSignInView = sec.Key("REQUIRE_SIGNIN_VIEW").MustBool()
|
||||
Service.EnableBasicAuth = sec.Key("ENABLE_BASIC_AUTHENTICATION").MustBool(true)
|
||||
Service.EnablePasswordSignInForm = sec.Key("ENABLE_PASSWORD_SIGNIN_FORM").MustBool(true)
|
||||
Service.EnablePasskeyAuth = sec.Key("ENABLE_PASSKEY_AUTHENTICATION").MustBool(true)
|
||||
Service.EnableReverseProxyAuth = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION").MustBool()
|
||||
Service.EnableReverseProxyAuthAPI = sec.Key("ENABLE_REVERSE_PROXY_AUTHENTICATION_API").MustBool()
|
||||
Service.EnableReverseProxyAutoRegister = sec.Key("ENABLE_REVERSE_PROXY_AUTO_REGISTRATION").MustBool()
|
||||
|
@ -93,7 +93,7 @@ func Clean(storage ObjectStorage) error {
|
||||
}
|
||||
|
||||
// SaveFrom saves data to the ObjectStorage with path p from the callback
|
||||
func SaveFrom(objStorage ObjectStorage, p string, callback func(w io.Writer) error) error {
|
||||
func SaveFrom(objStorage ObjectStorage, path string, callback func(w io.Writer) error) error {
|
||||
pr, pw := io.Pipe()
|
||||
defer pr.Close()
|
||||
go func() {
|
||||
@ -103,7 +103,7 @@ func SaveFrom(objStorage ObjectStorage, p string, callback func(w io.Writer) err
|
||||
}
|
||||
}()
|
||||
|
||||
_, err := objStorage.Save(p, pr, -1)
|
||||
_, err := objStorage.Save(path, pr, -1)
|
||||
return err
|
||||
}
|
||||
|
||||
|
@ -116,14 +116,7 @@ var (
|
||||
_ Payloader = &PackagePayload{}
|
||||
)
|
||||
|
||||
// _________ __
|
||||
// \_ ___ \_______ ____ _____ _/ |_ ____
|
||||
// / \ \/\_ __ \_/ __ \\__ \\ __\/ __ \
|
||||
// \ \____| | \/\ ___/ / __ \| | \ ___/
|
||||
// \______ /|__| \___ >____ /__| \___ >
|
||||
// \/ \/ \/ \/
|
||||
|
||||
// CreatePayload FIXME
|
||||
// CreatePayload represents a payload information of create event.
|
||||
type CreatePayload struct {
|
||||
Sha string `json:"sha"`
|
||||
Ref string `json:"ref"`
|
||||
@ -157,13 +150,6 @@ func ParseCreateHook(raw []byte) (*CreatePayload, error) {
|
||||
return hook, nil
|
||||
}
|
||||
|
||||
// ________ .__ __
|
||||
// \______ \ ____ | | _____/ |_ ____
|
||||
// | | \_/ __ \| | _/ __ \ __\/ __ \
|
||||
// | ` \ ___/| |_\ ___/| | \ ___/
|
||||
// /_______ /\___ >____/\___ >__| \___ >
|
||||
// \/ \/ \/ \/
|
||||
|
||||
// PusherType define the type to push
|
||||
type PusherType string
|
||||
|
||||
@ -186,13 +172,6 @@ func (p *DeletePayload) JSONPayload() ([]byte, error) {
|
||||
return json.MarshalIndent(p, "", " ")
|
||||
}
|
||||
|
||||
// ___________ __
|
||||
// \_ _____/__________| | __
|
||||
// | __)/ _ \_ __ \ |/ /
|
||||
// | \( <_> ) | \/ <
|
||||
// \___ / \____/|__| |__|_ \
|
||||
// \/ \/
|
||||
|
||||
// ForkPayload represents fork payload
|
||||
type ForkPayload struct {
|
||||
Forkee *Repository `json:"forkee"`
|
||||
@ -232,13 +211,6 @@ func (p *IssueCommentPayload) JSONPayload() ([]byte, error) {
|
||||
return json.MarshalIndent(p, "", " ")
|
||||
}
|
||||
|
||||
// __________ .__
|
||||
// \______ \ ____ | | ____ _____ ______ ____
|
||||
// | _// __ \| | _/ __ \\__ \ / ___// __ \
|
||||
// | | \ ___/| |_\ ___/ / __ \_\___ \\ ___/
|
||||
// |____|_ /\___ >____/\___ >____ /____ >\___ >
|
||||
// \/ \/ \/ \/ \/ \/
|
||||
|
||||
// HookReleaseAction defines hook release action type
|
||||
type HookReleaseAction string
|
||||
|
||||
@ -302,13 +274,6 @@ func (p *PushPayload) Branch() string {
|
||||
return strings.ReplaceAll(p.Ref, "refs/heads/", "")
|
||||
}
|
||||
|
||||
// .___
|
||||
// | | ______ ________ __ ____
|
||||
// | |/ ___// ___/ | \_/ __ \
|
||||
// | |\___ \ \___ \| | /\ ___/
|
||||
// |___/____ >____ >____/ \___ >
|
||||
// \/ \/ \/
|
||||
|
||||
// HookIssueAction FIXME
|
||||
type HookIssueAction string
|
||||
|
||||
@ -371,13 +336,6 @@ type ChangesPayload struct {
|
||||
Ref *ChangesFromPayload `json:"ref,omitempty"`
|
||||
}
|
||||
|
||||
// __________ .__ .__ __________ __
|
||||
// \______ \__ __| | | | \______ \ ____ ________ __ ____ _______/ |_
|
||||
// | ___/ | \ | | | | _// __ \/ ____/ | \_/ __ \ / ___/\ __\
|
||||
// | | | | / |_| |__ | | \ ___< <_| | | /\ ___/ \___ \ | |
|
||||
// |____| |____/|____/____/ |____|_ /\___ >__ |____/ \___ >____ > |__|
|
||||
// \/ \/ |__| \/ \/
|
||||
|
||||
// PullRequestPayload represents a payload information of pull request event.
|
||||
type PullRequestPayload struct {
|
||||
Action HookIssueAction `json:"action"`
|
||||
@ -402,13 +360,6 @@ type ReviewPayload struct {
|
||||
Content string `json:"content"`
|
||||
}
|
||||
|
||||
// __ __.__ __ .__
|
||||
// / \ / \__| | _|__|
|
||||
// \ \/\/ / | |/ / |
|
||||
// \ /| | <| |
|
||||
// \__/\ / |__|__|_ \__|
|
||||
// \/ \/
|
||||
|
||||
// HookWikiAction an action that happens to a wiki page
|
||||
type HookWikiAction string
|
||||
|
||||
@ -435,13 +386,6 @@ func (p *WikiPayload) JSONPayload() ([]byte, error) {
|
||||
return json.MarshalIndent(p, "", " ")
|
||||
}
|
||||
|
||||
//__________ .__ __
|
||||
//\______ \ ____ ______ ____ _____|__|/ |_ ___________ ___.__.
|
||||
// | _// __ \\____ \ / _ \/ ___/ \ __\/ _ \_ __ < | |
|
||||
// | | \ ___/| |_> > <_> )___ \| || | ( <_> ) | \/\___ |
|
||||
// |____|_ /\___ > __/ \____/____ >__||__| \____/|__| / ____|
|
||||
// \/ \/|__| \/ \/
|
||||
|
||||
// HookRepoAction an action that happens to a repo
|
||||
type HookRepoAction string
|
||||
|
||||
@ -480,7 +424,7 @@ type PackagePayload struct {
|
||||
Action HookPackageAction `json:"action"`
|
||||
Repository *Repository `json:"repository"`
|
||||
Package *Package `json:"package"`
|
||||
Organization *User `json:"organization"`
|
||||
Organization *Organization `json:"organization"`
|
||||
Sender *User `json:"sender"`
|
||||
}
|
||||
|
||||
|
@ -57,3 +57,12 @@ type EditOrgOption struct {
|
||||
Visibility string `json:"visibility" binding:"In(,public,limited,private)"`
|
||||
RepoAdminChangeTeamAccess *bool `json:"repo_admin_change_team_access"`
|
||||
}
|
||||
|
||||
// RenameOrgOption options when renaming an organization
|
||||
type RenameOrgOption struct {
|
||||
// New username for this org. This name cannot be in use yet by any other user.
|
||||
//
|
||||
// required: true
|
||||
// unique: true
|
||||
NewName string `json:"new_name" binding:"Required"`
|
||||
}
|
||||
|
73
modules/tailmsg/talimsg.go
Normal file
73
modules/tailmsg/talimsg.go
Normal file
@ -0,0 +1,73 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package tailmsg

import (
"sync"
"time"
)

type MsgRecord struct {
Time time.Time
Content string
}

type MsgRecorder interface {
Record(content string)
GetRecords() []*MsgRecord
}

type memoryMsgRecorder struct {
mu sync.RWMutex
msgs []*MsgRecord
limit int
}

// TODO: use redis for a clustered environment

func (m *memoryMsgRecorder) Record(content string) {
m.mu.Lock()
defer m.mu.Unlock()
m.msgs = append(m.msgs, &MsgRecord{
Time: time.Now(),
Content: content,
})
if len(m.msgs) > m.limit {
m.msgs = m.msgs[len(m.msgs)-m.limit:]
}
}

func (m *memoryMsgRecorder) GetRecords() []*MsgRecord {
m.mu.RLock()
defer m.mu.RUnlock()
ret := make([]*MsgRecord, len(m.msgs))
copy(ret, m.msgs)
return ret
}

func NewMsgRecorder(limit int) MsgRecorder {
return &memoryMsgRecorder{
limit: limit,
}
}

type Manager struct {
traceRecorder MsgRecorder
logRecorder MsgRecorder
}

func (m *Manager) GetTraceRecorder() MsgRecorder {
return m.traceRecorder
}

func (m *Manager) GetLogRecorder() MsgRecorder {
return m.logRecorder
}

var GetManager = sync.OnceValue(func() *Manager {
return &Manager{
traceRecorder: NewMsgRecorder(100),
logRecorder: NewMsgRecorder(1000),
}
})
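The new tailmsg package keeps a bounded, in-memory list of recent messages behind a process-wide singleton. A usage sketch based only on the API shown above (the surrounding main scaffolding is illustrative):

package main

import (
	"fmt"

	"code.gitea.io/gitea/modules/tailmsg"
)

func main() {
	// Record a few trace messages; only the most recent `limit` entries are kept.
	rec := tailmsg.GetManager().GetTraceRecorder()
	rec.Record("git command started")
	rec.Record("git command finished")

	for _, r := range rec.GetRecords() {
		fmt.Println(r.Time.Format("15:04:05"), r.Content)
	}
}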
@ -69,7 +69,7 @@ func NewFuncMap() template.FuncMap {
// time / number / format
"FileSize": base.FileSize,
"CountFmt": countFmt,
"Sec2Time": util.SecToTime,
"Sec2Hour": util.SecToHours,

"TimeEstimateString": timeEstimateString,
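With the funcmap entry above, templates call the helper as Sec2Hour. A self-contained sketch of how such a FuncMap entry is wired up and used (the template text and the simplified converter are illustrative, not Gitea's templates):

package main

import (
	"fmt"
	"html/template"
	"os"
)

// secToHours is a simplified stand-in for util.SecToHours: whole hours and minutes only.
func secToHours(seconds int64) string {
	h, m := seconds/3600, (seconds/60)%60
	if m == 0 {
		return fmt.Sprintf("%d hours", h)
	}
	return fmt.Sprintf("%d hours %d minutes", h, m)
}

func main() {
	tmpl := template.Must(template.New("t").
		Funcs(template.FuncMap{"Sec2Hour": secToHours}).
		Parse(`total time spent: {{Sec2Hour .}}`))
	_ = tmpl.Execute(os.Stdout, int64(6*24*3600+12*3600+30*60)) // total time spent: 156 hours 30 minutes
}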
@ -8,61 +8,23 @@ import (
"strings"
)

// SecToTime converts an amount of seconds to a human-readable string. E.g.
// 66s -> 1 minute 6 seconds
// 52410s -> 14 hours 33 minutes
// 563418 -> 6 days 12 hours
// 1563418 -> 2 weeks 4 days
// 3937125s -> 1 month 2 weeks
// 45677465s -> 1 year 6 months
func SecToTime(durationVal any) string {
duration, _ := ToInt64(durationVal)
// SecToHours converts an amount of seconds to a human-readable hours string.
// This is stable for planning and managing timesheets.
// Here it only supports hours and minutes, because a work day could contain 6 or 7 or 8 hours.
// If the duration is less than 1 minute, it will be shown as seconds.
func SecToHours(durationVal any) string {
seconds, _ := ToInt64(durationVal)
hours := seconds / 3600
minutes := (seconds / 60) % 60

formattedTime := ""

// The following four variables are calculated by taking
// into account the previously calculated variables, this avoids
// pitfalls when using remainders. As that could lead to incorrect
// results when the calculated number equals the quotient number.
remainingDays := duration / (60 * 60 * 24)
years := remainingDays / 365
remainingDays -= years * 365
months := remainingDays * 12 / 365
remainingDays -= months * 365 / 12
weeks := remainingDays / 7
remainingDays -= weeks * 7
days := remainingDays

// The following three variables are calculated without depending
// on the previous calculated variables.
hours := (duration / 3600) % 24
minutes := (duration / 60) % 60
seconds := duration % 60

// Extract only the relevant information of the time
// If the time is greater than a year, it makes no sense to display seconds.
switch {
case years > 0:
formattedTime = formatTime(years, "year", formattedTime)
formattedTime = formatTime(months, "month", formattedTime)
case months > 0:
formattedTime = formatTime(months, "month", formattedTime)
formattedTime = formatTime(weeks, "week", formattedTime)
case weeks > 0:
formattedTime = formatTime(weeks, "week", formattedTime)
formattedTime = formatTime(days, "day", formattedTime)
case days > 0:
formattedTime = formatTime(days, "day", formattedTime)
formattedTime = formatTime(hours, "hour", formattedTime)
case hours > 0:
formattedTime = formatTime(hours, "hour", formattedTime)
formattedTime = formatTime(minutes, "minute", formattedTime)
default:
formattedTime = formatTime(minutes, "minute", formattedTime)
formattedTime = formatTime(seconds, "second", formattedTime)
}
formattedTime = formatTime(hours, "hour", formattedTime)
formattedTime = formatTime(minutes, "minute", formattedTime)

// The formatTime() function always appends a space at the end. This will be trimmed
if formattedTime == "" && seconds > 0 {
formattedTime = formatTime(seconds, "second", "")
}
return strings.TrimRight(formattedTime, " ")
}

@ -76,6 +38,5 @@ func formatTime(value int64, name, formattedTime string) string {
} else if value > 1 {
formattedTime = fmt.Sprintf("%s%d %ss ", formattedTime, value, name)
}

return formattedTime
}
@ -9,22 +9,20 @@ import (
"github.com/stretchr/testify/assert"
)

func TestSecToTime(t *testing.T) {
func TestSecToHours(t *testing.T) {
second := int64(1)
minute := 60 * second
hour := 60 * minute
day := 24 * hour
year := 365 * day

assert.Equal(t, "1 minute 6 seconds", SecToTime(minute+6*second))
assert.Equal(t, "1 hour", SecToTime(hour))
assert.Equal(t, "1 hour", SecToTime(hour+second))
assert.Equal(t, "14 hours 33 minutes", SecToTime(14*hour+33*minute+30*second))
assert.Equal(t, "6 days 12 hours", SecToTime(6*day+12*hour+30*minute+18*second))
assert.Equal(t, "2 weeks 4 days", SecToTime((2*7+4)*day+2*hour+16*minute+58*second))
assert.Equal(t, "4 weeks", SecToTime(4*7*day))
assert.Equal(t, "4 weeks 1 day", SecToTime((4*7+1)*day))
assert.Equal(t, "1 month 2 weeks", SecToTime((6*7+3)*day+13*hour+38*minute+45*second))
assert.Equal(t, "11 months", SecToTime(year-25*day))
assert.Equal(t, "1 year 5 months", SecToTime(year+163*day+10*hour+11*minute+5*second))
assert.Equal(t, "1 minute", SecToHours(minute+6*second))
assert.Equal(t, "1 hour", SecToHours(hour))
assert.Equal(t, "1 hour", SecToHours(hour+second))
assert.Equal(t, "14 hours 33 minutes", SecToHours(14*hour+33*minute+30*second))
assert.Equal(t, "156 hours 30 minutes", SecToHours(6*day+12*hour+30*minute+18*second))
assert.Equal(t, "98 hours 16 minutes", SecToHours(4*day+2*hour+16*minute+58*second))
assert.Equal(t, "672 hours", SecToHours(4*7*day))
assert.Equal(t, "1 second", SecToHours(1))
assert.Equal(t, "2 seconds", SecToHours(2))
assert.Equal(t, "", SecToHours(nil)) // old behavior, empty means no output
}
@ -19,40 +19,40 @@ func getGlobPatternErrorString(pattern string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
var globValidationTestCases = []validationTestCase{
|
||||
{
|
||||
description: "Empty glob pattern",
|
||||
data: TestForm{
|
||||
GlobPattern: "",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Valid glob",
|
||||
data: TestForm{
|
||||
GlobPattern: "{master,release*}",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
|
||||
{
|
||||
description: "Invalid glob",
|
||||
data: TestForm{
|
||||
GlobPattern: "[a-",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"GlobPattern"},
|
||||
Classification: ErrGlobPattern,
|
||||
Message: getGlobPatternErrorString("[a-"),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func Test_GlobPatternValidation(t *testing.T) {
|
||||
AddBindingRules()
|
||||
|
||||
globValidationTestCases := []validationTestCase{
|
||||
{
|
||||
description: "Empty glob pattern",
|
||||
data: TestForm{
|
||||
GlobPattern: "",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Valid glob",
|
||||
data: TestForm{
|
||||
GlobPattern: "{master,release*}",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
|
||||
{
|
||||
description: "Invalid glob",
|
||||
data: TestForm{
|
||||
GlobPattern: "[a-",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"GlobPattern"},
|
||||
Classification: ErrGlobPattern,
|
||||
Message: getGlobPatternErrorString("[a-"),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range globValidationTestCases {
|
||||
t.Run(testCase.description, func(t *testing.T) {
|
||||
performValidationTest(t, testCase)
|
||||
|
@ -8,13 +8,26 @@ import (
|
||||
"net/url"
|
||||
"regexp"
|
||||
"strings"
|
||||
"sync"
|
||||
|
||||
"code.gitea.io/gitea/modules/setting"
|
||||
|
||||
"github.com/gobwas/glob"
|
||||
)
|
||||
|
||||
var externalTrackerRegex = regexp.MustCompile(`({?)(?:user|repo|index)+?(}?)`)
|
||||
type globalVarsStruct struct {
|
||||
externalTrackerRegex *regexp.Regexp
|
||||
validUsernamePattern *regexp.Regexp
|
||||
invalidUsernamePattern *regexp.Regexp
|
||||
}
|
||||
|
||||
var globalVars = sync.OnceValue(func() *globalVarsStruct {
|
||||
return &globalVarsStruct{
|
||||
externalTrackerRegex: regexp.MustCompile(`({?)(?:user|repo|index)+?(}?)`),
|
||||
validUsernamePattern: regexp.MustCompile(`^[\da-zA-Z][-.\w]*$`),
|
||||
invalidUsernamePattern: regexp.MustCompile(`[-._]{2,}|[-._]$`), // No consecutive or trailing non-alphanumeric chars
|
||||
}
|
||||
})
|
||||
|
||||
func isLoopbackIP(ip string) bool {
|
||||
return net.ParseIP(ip).IsLoopback()
|
||||
@ -105,9 +118,9 @@ func IsValidExternalTrackerURLFormat(uri string) bool {
|
||||
if !IsValidExternalURL(uri) {
|
||||
return false
|
||||
}
|
||||
|
||||
vars := globalVars()
|
||||
// check for typoed variables like /{index/ or /[repo}
|
||||
for _, match := range externalTrackerRegex.FindAllStringSubmatch(uri, -1) {
|
||||
for _, match := range vars.externalTrackerRegex.FindAllStringSubmatch(uri, -1) {
|
||||
if (match[1] == "{" || match[2] == "}") && (match[1] != "{" || match[2] != "}") {
|
||||
return false
|
||||
}
|
||||
@ -116,14 +129,10 @@ func IsValidExternalTrackerURLFormat(uri string) bool {
|
||||
return true
|
||||
}
|
||||
|
||||
var (
|
||||
validUsernamePattern = regexp.MustCompile(`^[\da-zA-Z][-.\w]*$`)
|
||||
invalidUsernamePattern = regexp.MustCompile(`[-._]{2,}|[-._]$`) // No consecutive or trailing non-alphanumeric chars
|
||||
)
|
||||
|
||||
// IsValidUsername checks if username is valid
|
||||
func IsValidUsername(name string) bool {
|
||||
// It is difficult to find a single pattern that is both readable and effective,
|
||||
// but it's easier to use positive and negative checks.
|
||||
return validUsernamePattern.MatchString(name) && !invalidUsernamePattern.MatchString(name)
|
||||
vars := globalVars()
|
||||
return vars.validUsernamePattern.MatchString(name) && !vars.invalidUsernamePattern.MatchString(name)
|
||||
}
|
||||
|
@ -9,253 +9,252 @@ import (
|
||||
"gitea.com/go-chi/binding"
|
||||
)
|
||||
|
||||
var gitRefNameValidationTestCases = []validationTestCase{
|
||||
{
|
||||
description: "Reference name contains only characters",
|
||||
data: TestForm{
|
||||
BranchName: "test",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains single slash",
|
||||
data: TestForm{
|
||||
BranchName: "feature/test",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name has allowed special characters",
|
||||
data: TestForm{
|
||||
BranchName: "debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains backslash",
|
||||
data: TestForm{
|
||||
BranchName: "feature\\test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name starts with dot",
|
||||
data: TestForm{
|
||||
BranchName: ".test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with dot",
|
||||
data: TestForm{
|
||||
BranchName: "test.",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name starts with slash",
|
||||
data: TestForm{
|
||||
BranchName: "/test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with slash",
|
||||
data: TestForm{
|
||||
BranchName: "test/",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with .lock",
|
||||
data: TestForm{
|
||||
BranchName: "test.lock",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains multiple consecutive dots",
|
||||
data: TestForm{
|
||||
BranchName: "te..st",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains multiple consecutive slashes",
|
||||
data: TestForm{
|
||||
BranchName: "te//st",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name is single @",
|
||||
data: TestForm{
|
||||
BranchName: "@",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has @{",
|
||||
data: TestForm{
|
||||
BranchName: "branch@{",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ~",
|
||||
data: TestForm{
|
||||
BranchName: "~debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character *",
|
||||
data: TestForm{
|
||||
BranchName: "*debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ?",
|
||||
data: TestForm{
|
||||
BranchName: "?debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ^",
|
||||
data: TestForm{
|
||||
BranchName: "^debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character :",
|
||||
data: TestForm{
|
||||
BranchName: "debian:jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character (whitespace)",
|
||||
data: TestForm{
|
||||
BranchName: "debian jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character [",
|
||||
data: TestForm{
|
||||
BranchName: "debian[jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func Test_GitRefNameValidation(t *testing.T) {
|
||||
AddBindingRules()
|
||||
gitRefNameValidationTestCases := []validationTestCase{
|
||||
{
|
||||
description: "Reference name contains only characters",
|
||||
data: TestForm{
|
||||
BranchName: "test",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains single slash",
|
||||
data: TestForm{
|
||||
BranchName: "feature/test",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name has allowed special characters",
|
||||
data: TestForm{
|
||||
BranchName: "debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains backslash",
|
||||
data: TestForm{
|
||||
BranchName: "feature\\test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name starts with dot",
|
||||
data: TestForm{
|
||||
BranchName: ".test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with dot",
|
||||
data: TestForm{
|
||||
BranchName: "test.",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name starts with slash",
|
||||
data: TestForm{
|
||||
BranchName: "/test",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with slash",
|
||||
data: TestForm{
|
||||
BranchName: "test/",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name ends with .lock",
|
||||
data: TestForm{
|
||||
BranchName: "test.lock",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains multiple consecutive dots",
|
||||
data: TestForm{
|
||||
BranchName: "te..st",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name contains multiple consecutive slashes",
|
||||
data: TestForm{
|
||||
BranchName: "te//st",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name is single @",
|
||||
data: TestForm{
|
||||
BranchName: "@",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has @{",
|
||||
data: TestForm{
|
||||
BranchName: "branch@{",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ~",
|
||||
data: TestForm{
|
||||
BranchName: "~debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character *",
|
||||
data: TestForm{
|
||||
BranchName: "*debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ?",
|
||||
data: TestForm{
|
||||
BranchName: "?debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character ^",
|
||||
data: TestForm{
|
||||
BranchName: "^debian/1%1.6.0-2",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character :",
|
||||
data: TestForm{
|
||||
BranchName: "debian:jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character (whitespace)",
|
||||
data: TestForm{
|
||||
BranchName: "debian jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
{
|
||||
description: "Reference name has unallowed special character [",
|
||||
data: TestForm{
|
||||
BranchName: "debian[jessie",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"BranchName"},
|
||||
Classification: ErrGitRefName,
|
||||
Message: "GitRefName",
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range gitRefNameValidationTestCases {
|
||||
t.Run(testCase.description, func(t *testing.T) {
|
||||
|
@ -17,40 +17,40 @@ func getRegexPatternErrorString(pattern string) string {
|
||||
return ""
|
||||
}
|
||||
|
||||
var regexValidationTestCases = []validationTestCase{
|
||||
{
|
||||
description: "Empty regex pattern",
|
||||
data: TestForm{
|
||||
RegexPattern: "",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Valid regex",
|
||||
data: TestForm{
|
||||
RegexPattern: `(\d{1,3})+`,
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
|
||||
{
|
||||
description: "Invalid regex",
|
||||
data: TestForm{
|
||||
RegexPattern: "[a-",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"RegexPattern"},
|
||||
Classification: ErrRegexPattern,
|
||||
Message: getRegexPatternErrorString("[a-"),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
func Test_RegexPatternValidation(t *testing.T) {
|
||||
AddBindingRules()
|
||||
|
||||
regexValidationTestCases := []validationTestCase{
|
||||
{
|
||||
description: "Empty regex pattern",
|
||||
data: TestForm{
|
||||
RegexPattern: "",
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
{
|
||||
description: "Valid regex",
|
||||
data: TestForm{
|
||||
RegexPattern: `(\d{1,3})+`,
|
||||
},
|
||||
expectedErrors: binding.Errors{},
|
||||
},
|
||||
|
||||
{
|
||||
description: "Invalid regex",
|
||||
data: TestForm{
|
||||
RegexPattern: "[a-",
|
||||
},
|
||||
expectedErrors: binding.Errors{
|
||||
binding.Error{
|
||||
FieldNames: []string{"RegexPattern"},
|
||||
Classification: ErrRegexPattern,
|
||||
Message: getRegexPatternErrorString("[a-"),
|
||||
},
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
for _, testCase := range regexValidationTestCases {
|
||||
t.Run(testCase.description, func(t *testing.T) {
|
||||
performValidationTest(t, testCase)
|
||||
|
@ -9,99 +9,99 @@ import (
    "gitea.com/go-chi/binding"
)

var urlValidationTestCases = []validationTestCase{
    {
        description: "Empty URL",
        data: TestForm{
            URL: "",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL without port",
        data: TestForm{
            URL: "http://test.lan/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with port",
        data: TestForm{
            URL: "http://test.lan:3000/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with IPv6 address without port",
        data: TestForm{
            URL: "http://[::1]/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with IPv6 address with port",
        data: TestForm{
            URL: "http://[::1]:3000/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "Invalid URL",
        data: TestForm{
            URL: "http//test.lan/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URL"},
                Classification: binding.ERR_URL,
                Message: "Url",
            },
        },
    },
    {
        description: "Invalid schema",
        data: TestForm{
            URL: "ftp://test.lan/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URL"},
                Classification: binding.ERR_URL,
                Message: "Url",
            },
        },
    },
    {
        description: "Invalid port",
        data: TestForm{
            URL: "http://test.lan:3x4/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URL"},
                Classification: binding.ERR_URL,
                Message: "Url",
            },
        },
    },
    {
        description: "Invalid port with IPv6 address",
        data: TestForm{
            URL: "http://[::1]:3x4/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URL"},
                Classification: binding.ERR_URL,
                Message: "Url",
            },
        },
    },
}

func Test_ValidURLValidation(t *testing.T) {
    AddBindingRules()

    urlValidationTestCases := []validationTestCase{
        {
            description: "Empty URL",
            data: TestForm{
                URL: "",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL without port",
            data: TestForm{
                URL: "http://test.lan/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with port",
            data: TestForm{
                URL: "http://test.lan:3000/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with IPv6 address without port",
            data: TestForm{
                URL: "http://[::1]/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with IPv6 address with port",
            data: TestForm{
                URL: "http://[::1]:3000/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "Invalid URL",
            data: TestForm{
                URL: "http//test.lan/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URL"},
                    Classification: binding.ERR_URL,
                    Message: "Url",
                },
            },
        },
        {
            description: "Invalid schema",
            data: TestForm{
                URL: "ftp://test.lan/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URL"},
                    Classification: binding.ERR_URL,
                    Message: "Url",
                },
            },
        },
        {
            description: "Invalid port",
            data: TestForm{
                URL: "http://test.lan:3x4/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URL"},
                    Classification: binding.ERR_URL,
                    Message: "Url",
                },
            },
        },
        {
            description: "Invalid port with IPv6 address",
            data: TestForm{
                URL: "http://[::1]:3x4/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URL"},
                    Classification: binding.ERR_URL,
                    Message: "Url",
                },
            },
        },
    }

    for _, testCase := range urlValidationTestCases {
        t.Run(testCase.description, func(t *testing.T) {
            performValidationTest(t, testCase)
@ -9,146 +9,146 @@ import (
    "gitea.com/go-chi/binding"
)

// This is a copy of all the URL tests cases, plus additional ones to
// account for multiple URLs
var urlListValidationTestCases = []validationTestCase{
    {
        description: "Empty URL",
        data: TestForm{
            URLs: "",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL without port",
        data: TestForm{
            URLs: "http://test.lan/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with port",
        data: TestForm{
            URLs: "http://test.lan:3000/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with IPv6 address without port",
        data: TestForm{
            URLs: "http://[::1]/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "URL with IPv6 address with port",
        data: TestForm{
            URLs: "http://[::1]:3000/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "Invalid URL",
        data: TestForm{
            URLs: "http//test.lan/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "http//test.lan/",
            },
        },
    },
    {
        description: "Invalid schema",
        data: TestForm{
            URLs: "ftp://test.lan/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "ftp://test.lan/",
            },
        },
    },
    {
        description: "Invalid port",
        data: TestForm{
            URLs: "http://test.lan:3x4/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "http://test.lan:3x4/",
            },
        },
    },
    {
        description: "Invalid port with IPv6 address",
        data: TestForm{
            URLs: "http://[::1]:3x4/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "http://[::1]:3x4/",
            },
        },
    },
    {
        description: "Multi URLs",
        data: TestForm{
            URLs: "http://test.lan:3000/\nhttp://test.local/",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "Multi URLs with newline",
        data: TestForm{
            URLs: "http://test.lan:3000/\nhttp://test.local/\n",
        },
        expectedErrors: binding.Errors{},
    },
    {
        description: "List with invalid entry",
        data: TestForm{
            URLs: "http://test.lan:3000/\nhttp://[::1]:3x4/",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "http://[::1]:3x4/",
            },
        },
    },
    {
        description: "List with two invalid entries",
        data: TestForm{
            URLs: "ftp://test.lan:3000/\nhttp://[::1]:3x4/\n",
        },
        expectedErrors: binding.Errors{
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "ftp://test.lan:3000/",
            },
            binding.Error{
                FieldNames: []string{"URLs"},
                Classification: binding.ERR_URL,
                Message: "http://[::1]:3x4/",
            },
        },
    },
}

func Test_ValidURLListValidation(t *testing.T) {
    AddBindingRules()

    // This is a copy of all the URL tests cases, plus additional ones to
    // account for multiple URLs
    urlListValidationTestCases := []validationTestCase{
        {
            description: "Empty URL",
            data: TestForm{
                URLs: "",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL without port",
            data: TestForm{
                URLs: "http://test.lan/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with port",
            data: TestForm{
                URLs: "http://test.lan:3000/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with IPv6 address without port",
            data: TestForm{
                URLs: "http://[::1]/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "URL with IPv6 address with port",
            data: TestForm{
                URLs: "http://[::1]:3000/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "Invalid URL",
            data: TestForm{
                URLs: "http//test.lan/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "http//test.lan/",
                },
            },
        },
        {
            description: "Invalid schema",
            data: TestForm{
                URLs: "ftp://test.lan/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "ftp://test.lan/",
                },
            },
        },
        {
            description: "Invalid port",
            data: TestForm{
                URLs: "http://test.lan:3x4/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "http://test.lan:3x4/",
                },
            },
        },
        {
            description: "Invalid port with IPv6 address",
            data: TestForm{
                URLs: "http://[::1]:3x4/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "http://[::1]:3x4/",
                },
            },
        },
        {
            description: "Multi URLs",
            data: TestForm{
                URLs: "http://test.lan:3000/\nhttp://test.local/",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "Multi URLs with newline",
            data: TestForm{
                URLs: "http://test.lan:3000/\nhttp://test.local/\n",
            },
            expectedErrors: binding.Errors{},
        },
        {
            description: "List with invalid entry",
            data: TestForm{
                URLs: "http://test.lan:3000/\nhttp://[::1]:3x4/",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "http://[::1]:3x4/",
                },
            },
        },
        {
            description: "List with two invalid entries",
            data: TestForm{
                URLs: "ftp://test.lan:3000/\nhttp://[::1]:3x4/\n",
            },
            expectedErrors: binding.Errors{
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "ftp://test.lan:3000/",
                },
                binding.Error{
                    FieldNames: []string{"URLs"},
                    Classification: binding.ERR_URL,
                    Message: "http://[::1]:3x4/",
                },
            },
        },
    }

    for _, testCase := range urlListValidationTestCases {
        t.Run(testCase.description, func(t *testing.T) {
            performValidationTest(t, testCase)
@ -78,7 +78,7 @@ func GetInclude(field reflect.StructField) string {
    return getRuleBody(field, "Include(")
}

// Validate validate TODO:
// Validate validate
func Validate(errs binding.Errors, data map[string]any, f Form, l translation.Locale) binding.Errors {
    if errs.Len() == 0 {
        return errs
@ -6,6 +6,7 @@ package middleware
import (
    "fmt"
    "html/template"
    "net/http"
    "net/url"

    "code.gitea.io/gitea/modules/reqctx"
@ -65,3 +66,27 @@ func (f *Flash) Success(msg any, current ...bool) {
    f.SuccessMsg = flashMsgStringOrHTML(msg)
    f.set("success", f.SuccessMsg, current...)
}

func ParseCookieFlashMessage(val string) *Flash {
    if vals, _ := url.ParseQuery(val); len(vals) > 0 {
        return &Flash{
            Values: vals,
            ErrorMsg: vals.Get("error"),
            SuccessMsg: vals.Get("success"),
            InfoMsg: vals.Get("info"),
            WarningMsg: vals.Get("warning"),
        }
    }
    return nil
}

func GetSiteCookieFlashMessage(dataStore reqctx.RequestDataStore, req *http.Request, cookieName string) (string, *Flash) {
    // Get the last flash message from cookie
    lastFlashCookie := GetSiteCookie(req, cookieName)
    lastFlashMsg := ParseCookieFlashMessage(lastFlashCookie)
    if lastFlashMsg != nil {
        lastFlashMsg.DataStore = dataStore
        return lastFlashCookie, lastFlashMsg
    }
    return lastFlashCookie, nil
}
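The new ParseCookieFlashMessage above treats the raw cookie value as a URL-encoded query string and maps the well-known keys onto the Flash fields. A minimal standalone sketch of that parsing step, using only the standard library; the key names come from the hunk above, while the cookie value is made up for illustration:

// Illustrative sketch only - not part of the diff. It mirrors what
// ParseCookieFlashMessage does with a cookie value: the flash cookie is just a
// URL-encoded query string such as "error=Something+went+wrong".
package main

import (
    "fmt"
    "net/url"
)

func main() {
    cookieValue := "success=Repository+created&info=Welcome" // hypothetical value

    vals, err := url.ParseQuery(cookieValue)
    if err != nil || len(vals) == 0 {
        fmt.Println("no flash message")
        return
    }

    // ParseCookieFlashMessage copies these values into Flash.SuccessMsg,
    // Flash.ErrorMsg, Flash.InfoMsg and Flash.WarningMsg.
    fmt.Println("success:", vals.Get("success")) // "Repository created"
    fmt.Println("info:", vals.Get("info"))       // "Welcome"
}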
@ -1,14 +0,0 @@
// Copyright 2020 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package middleware

import (
    "net/http"
    "strings"
)

// IsAPIPath returns true if the specified URL is an API path
func IsAPIPath(req *http.Request) bool {
    return strings.HasPrefix(req.URL.Path, "/api/")
}
@ -6,6 +6,9 @@ package routing
import (
    "context"
    "net/http"

    "code.gitea.io/gitea/modules/gtprof"
    "code.gitea.io/gitea/modules/reqctx"
)

type contextKeyType struct{}
@ -14,10 +17,12 @@ var contextKey contextKeyType

// RecordFuncInfo records a func info into context
func RecordFuncInfo(ctx context.Context, funcInfo *FuncInfo) (end func()) {
    // TODO: reqCtx := reqctx.FromContext(ctx), add trace support
    end = func() {}

    // save the func info into the context record
    if reqCtx := reqctx.FromContext(ctx); reqCtx != nil {
        var traceSpan *gtprof.TraceSpan
        traceSpan, end = gtprof.GetTracer().StartInContext(reqCtx, "http.func")
        traceSpan.SetAttributeString("func", funcInfo.shortName)
    }
    if record, ok := ctx.Value(contextKey).(*requestRecord); ok {
        record.lock.Lock()
        record.funcInfo = funcInfo
20 modules/webhook/events.go Normal file
@ -0,0 +1,20 @@
// Copyright 2025 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package webhook

type HookEvents map[HookEventType]bool

func (he HookEvents) Get(evt HookEventType) bool {
    return he[evt]
}

// HookEvent represents events that will delivery hook.
type HookEvent struct {
    PushOnly       bool   `json:"push_only"`
    SendEverything bool   `json:"send_everything"`
    ChooseEvents   bool   `json:"choose_events"`
    BranchFilter   string `json:"branch_filter"`

    HookEvents `json:"events"`
}
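The new modules/webhook/events.go replaces the 21-field HookEvents struct (removed in the next hunk) with a map keyed by HookEventType, so callers ask about an event through Get instead of reading a dedicated boolean field. A small sketch of how the new type reads, using only types and constants that appear in this diff:

// Illustrative sketch only - not part of the diff. It shows how the new
// map-based HookEvents from modules/webhook/events.go is consumed, compared
// with the per-event boolean fields (Issues, Push, ...) of the struct removed
// in the following hunk.
package main

import (
    "fmt"

    "code.gitea.io/gitea/modules/webhook"
)

func main() {
    events := webhook.HookEvent{
        ChooseEvents: true,
        HookEvents: webhook.HookEvents{
            webhook.HookEventPush:   true,
            webhook.HookEventIssues: true,
        },
    }

    // Get simply indexes the map, so unknown event types default to false.
    fmt.Println(events.Get(webhook.HookEventPush))        // true
    fmt.Println(events.Get(webhook.HookEventPullRequest)) // false
}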
@ -1,39 +0,0 @@
// Copyright 2022 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package webhook

// HookEvents is a set of web hook events
type HookEvents struct {
    Create                   bool `json:"create"`
    Delete                   bool `json:"delete"`
    Fork                     bool `json:"fork"`
    Issues                   bool `json:"issues"`
    IssueAssign              bool `json:"issue_assign"`
    IssueLabel               bool `json:"issue_label"`
    IssueMilestone           bool `json:"issue_milestone"`
    IssueComment             bool `json:"issue_comment"`
    Push                     bool `json:"push"`
    PullRequest              bool `json:"pull_request"`
    PullRequestAssign        bool `json:"pull_request_assign"`
    PullRequestLabel         bool `json:"pull_request_label"`
    PullRequestMilestone     bool `json:"pull_request_milestone"`
    PullRequestComment       bool `json:"pull_request_comment"`
    PullRequestReview        bool `json:"pull_request_review"`
    PullRequestSync          bool `json:"pull_request_sync"`
    PullRequestReviewRequest bool `json:"pull_request_review_request"`
    Wiki                     bool `json:"wiki"`
    Repository               bool `json:"repository"`
    Release                  bool `json:"release"`
    Package                  bool `json:"package"`
}

// HookEvent represents events that will delivery hook.
type HookEvent struct {
    PushOnly       bool   `json:"push_only"`
    SendEverything bool   `json:"send_everything"`
    ChooseEvents   bool   `json:"choose_events"`
    BranchFilter   string `json:"branch_filter"`

    HookEvents `json:"events"`
}
@ -31,21 +31,47 @@ const (
    HookEventRepository HookEventType = "repository"
    HookEventRelease    HookEventType = "release"
    HookEventPackage    HookEventType = "package"
    HookEventSchedule   HookEventType = "schedule"
    HookEventStatus     HookEventType = "status"
    // once a new event added here, please also added to AllEvents() function

    // FIXME: This event should be a group of pull_request_review_xxx events
    HookEventPullRequestReview HookEventType = "pull_request_review"
    // Actions event only
    HookEventSchedule HookEventType = "schedule"
)

func AllEvents() []HookEventType {
    return []HookEventType{
        HookEventCreate,
        HookEventDelete,
        HookEventFork,
        HookEventPush,
        HookEventIssues,
        HookEventIssueAssign,
        HookEventIssueLabel,
        HookEventIssueMilestone,
        HookEventIssueComment,
        HookEventPullRequest,
        HookEventPullRequestAssign,
        HookEventPullRequestLabel,
        HookEventPullRequestMilestone,
        HookEventPullRequestComment,
        HookEventPullRequestReviewApproved,
        HookEventPullRequestReviewRejected,
        HookEventPullRequestReviewComment,
        HookEventPullRequestSync,
        HookEventPullRequestReviewRequest,
        HookEventWiki,
        HookEventRepository,
        HookEventRelease,
        HookEventPackage,
        HookEventStatus,
    }
}

// Event returns the HookEventType as an event string
func (h HookEventType) Event() string {
    switch h {
    case HookEventCreate:
        return "create"
    case HookEventDelete:
        return "delete"
    case HookEventFork:
        return "fork"
    case HookEventPush:
        return "push"
    case HookEventIssues, HookEventIssueAssign, HookEventIssueLabel, HookEventIssueMilestone:
        return "issues"
    case HookEventPullRequest, HookEventPullRequestAssign, HookEventPullRequestLabel, HookEventPullRequestMilestone,
@ -59,14 +85,9 @@ func (h HookEventType) Event() string {
        return "pull_request_rejected"
    case HookEventPullRequestReviewComment:
        return "pull_request_comment"
    case HookEventWiki:
        return "wiki"
    case HookEventRepository:
        return "repository"
    case HookEventRelease:
        return "release"
    default:
        return string(h)
    }
    return ""
}

func (h HookEventType) IsPullRequest() bool {
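Event() groups the fine-grained issue and pull-request event types under a single delivery name, and the new default branch returns the raw type string for anything without an explicit case. A tiny sketch of the resulting behaviour, limited to cases visible in the hunk above:

// Illustrative sketch only - not part of the diff.
package main

import (
    "fmt"

    "code.gitea.io/gitea/modules/webhook"
)

func main() {
    fmt.Println(webhook.HookEventIssueLabel.Event()) // "issues" (grouped with the other issue events)
    fmt.Println(webhook.HookEventWiki.Event())       // "wiki"
    fmt.Println(webhook.HookEventStatus.Event())     // "status" (default case: the raw type string)
}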
119 options/gitignore/Flutter Normal file
@ -0,0 +1,119 @@
# Miscellaneous
*.class
*.lock
*.log
*.pyc
*.swp
.buildlog/
.history

# Flutter repo-specific
/bin/cache/
/bin/internal/bootstrap.bat
/bin/internal/bootstrap.sh
/bin/mingit/
/dev/benchmarks/mega_gallery/
/dev/bots/.recipe_deps
/dev/bots/android_tools/
/dev/devicelab/ABresults*.json
/dev/docs/doc/
/dev/docs/flutter.docs.zip
/dev/docs/lib/
/dev/docs/pubspec.yaml
/dev/integration_tests/**/xcuserdata
/dev/integration_tests/**/Pods
/packages/flutter/coverage/
version
analysis_benchmark.json

# packages file containing multi-root paths
.packages.generated

# Flutter/Dart/Pub related
**/doc/api/
.dart_tool/
.flutter-plugins
.flutter-plugins-dependencies
**/generated_plugin_registrant.dart
.packages
.pub-preload-cache/
.pub/
build/
flutter_*.png
linked_*.ds
unlinked.ds
unlinked_spec.ds

# Android related
**/android/**/gradle-wrapper.jar
.gradle/
**/android/captures/
**/android/gradlew
**/android/gradlew.bat
**/android/local.properties
**/android/**/GeneratedPluginRegistrant.java
**/android/key.properties
*.jks

# iOS/XCode related
**/ios/**/*.mode1v3
**/ios/**/*.mode2v3
**/ios/**/*.moved-aside
**/ios/**/*.pbxuser
**/ios/**/*.perspectivev3
**/ios/**/*sync/
**/ios/**/.sconsign.dblite
**/ios/**/.tags*
**/ios/**/.vagrant/
**/ios/**/DerivedData/
**/ios/**/Icon?
**/ios/**/Pods/
**/ios/**/.symlinks/
**/ios/**/profile
**/ios/**/xcuserdata
**/ios/.generated/
**/ios/Flutter/.last_build_id
**/ios/Flutter/App.framework
**/ios/Flutter/Flutter.framework
**/ios/Flutter/Flutter.podspec
**/ios/Flutter/Generated.xcconfig
**/ios/Flutter/ephemeral
**/ios/Flutter/app.flx
**/ios/Flutter/app.zip
**/ios/Flutter/flutter_assets/
**/ios/Flutter/flutter_export_environment.sh
**/ios/ServiceDefinitions.json
**/ios/Runner/GeneratedPluginRegistrant.*

# macOS
**/Flutter/ephemeral/
**/Pods/
**/macos/Flutter/GeneratedPluginRegistrant.swift
**/macos/Flutter/ephemeral
**/xcuserdata/

# Windows
**/windows/flutter/generated_plugin_registrant.cc
**/windows/flutter/generated_plugin_registrant.h
**/windows/flutter/generated_plugins.cmake

# Linux
**/linux/flutter/generated_plugin_registrant.cc
**/linux/flutter/generated_plugin_registrant.h
**/linux/flutter/generated_plugins.cmake

# Coverage
coverage/

# Symbols
app.*.symbols

# Exceptions to above rules.
!**/ios/**/default.mode1v3
!**/ios/**/default.mode2v3
!**/ios/**/default.pbxuser
!**/ios/**/default.perspectivev3
!/packages/flutter_tools/test/data/dart_dependencies_test/**/.packages
!/dev/ci/**/Gemfile.lock
@ -1,3 +1,6 @@
# Ignore build outputs from performing a nix-build or `nix build` command
result
result-*

# Ignore automatically generated direnv output
.direnv
@ -104,6 +104,12 @@ dist
.temp
.cache

# vitepress build output
**/.vitepress/dist

# vitepress cache directory
**/.vitepress/cache

# Docusaurus cache and generated files
.docusaurus
16 options/gitignore/NotesAndCoreConfiguration Normal file
@ -0,0 +1,16 @@
# Excludes Obsidian workspace cache and plugins. All notes and core obsidian
# configuration files are tracked by Git.

# The current application UI state (DOM layout, recently-opened files, etc.) is
# stored in these files (separate for desktop and mobile) so you can resume
# your session seamlessly after a restart. If you want to track UI state, use
# the Workspaces core plugin instead of relying on these files.
.obsidian/workspace.json
.obsidian/workspace-mobile.json

# Obsidian plugins are stored under .obsidian/plugins/$plugin_name. They
# contain metadata (manifest.json), application code (main.js), stylesheets
# (styles.css), and user-configuration data (data.json).
# We want to exclude all plugin-related files, so we can exclude everything
# under this directory.
.obsidian/plugins/**/*
38 options/gitignore/NotesAndExtendedConfiguration Normal file
@ -0,0 +1,38 @@
# Excludes Obsidian workspace cache and plugin code, but retains plugin
# configuration. All notes and user-controlled configuration files are tracked
# by Git.
#
# !!! WARNING !!!
#
# Community plugins may store sensitive secrets in their data.json files. By
# including these files, those secrets may be tracked in your Git repository.
#
# To ignore configurations for specific plugins, add a line like this after the
# contents of this file (order is important):
# .obsidian/plugins/{{plugin_name}}/data.json
#
# Alternatively, ensure that you are treating your entire Git repository as
# sensitive data, since it may contain secrets, or may have contained them in
# past commits. Understand your threat profile, and make the decision
# appropriate for yourself. If in doubt, err on the side of not including
# plugin configuration. Use one of the alternative gitignore files instead:
# * NotesOnly.gitignore
# * NotesAndCoreConfiguration.gitignore

# The current application UI state (DOM layout, recently-opened files, etc.) is
# stored in these files (separate for desktop and mobile) so you can resume
# your session seamlessly after a restart. If you want to track UI state, use
# the Workspaces core plugin instead of relying on these files.
.obsidian/workspace.json
.obsidian/workspace-mobile.json

# Obsidian plugins are stored under .obsidian/plugins/$plugin_name. They
# contain metadata (manifest.json), application code (main.js), stylesheets
# (styles.css), and user-configuration data (data.json).
# We only want to track data.json, so we:
# 1. exclude everything under the plugins directory recursively,
# 2. unignore the plugin directories themselves, which then allows us to
# 3. unignore the data.json files
.obsidian/plugins/**/*
!.obsidian/plugins/*/
!.obsidian/plugins/*/data.json
4 options/gitignore/NotesOnly Normal file
@ -0,0 +1,4 @@
# Excludes all Obsidian-related configuration. All notes are tracked by Git.

# All Obsidian configuration and runtime state is stored here
.obsidian/**/*
Some files were not shown because too many files have changed in this diff.