diff --git a/.drone.yml b/.drone.yml index a8fa7eba3687b..f9da8f9743807 100644 --- a/.drone.yml +++ b/.drone.yml @@ -25,7 +25,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.19 + image: golang:1.20 pull: always commands: - make deps-backend @@ -88,7 +88,7 @@ steps: depends_on: [deps-frontend] - name: checks-backend - image: golang:1.19 + image: golang:1.20 commands: - make --always-make checks-backend # ensure the 'go-licenses' make target runs depends_on: [deps-backend] @@ -109,7 +109,7 @@ steps: depends_on: [deps-frontend] - name: build-backend-no-gcc - image: golang:1.18 # this step is kept as the lowest version of golang that we support + image: golang:1.19 # this step is kept as the lowest version of golang that we support pull: always environment: GO111MODULE: on @@ -122,7 +122,7 @@ steps: path: /go - name: build-backend-arm64 - image: golang:1.19 + image: golang:1.20 environment: GO111MODULE: on GOPROXY: https://goproxy.io @@ -138,7 +138,7 @@ steps: path: /go - name: build-backend-windows - image: golang:1.19 + image: golang:1.20 environment: GO111MODULE: on GOPROXY: https://goproxy.io @@ -153,7 +153,7 @@ steps: path: /go - name: build-backend-386 - image: golang:1.19 + image: golang:1.20 environment: GO111MODULE: on GOPROXY: https://goproxy.io @@ -247,7 +247,7 @@ steps: - pull_request - name: deps-backend - image: golang:1.19 + image: golang:1.20 pull: always commands: - make deps-backend @@ -364,7 +364,7 @@ steps: path: /go - name: generate-coverage - image: golang:1.19 + image: golang:1.20 commands: - make coverage environment: @@ -440,7 +440,7 @@ steps: - pull_request - name: deps-backend - image: golang:1.19 + image: golang:1.20 pull: always commands: - make deps-backend @@ -557,7 +557,7 @@ steps: - name: test-e2e image: mcr.microsoft.com/playwright:v1.29.2-focal commands: - - curl -sLO https://go.dev/dl/go1.19.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.19.linux-amd64.tar.gz + - curl -sLO https://go.dev/dl/go1.20.linux-amd64.tar.gz && tar -C /usr/local -xzf go1.20.linux-amd64.tar.gz - groupadd --gid 1001 gitea && useradd -m --gid 1001 --uid 1001 gitea - apt-get -qq update && apt-get -qqy install build-essential - export TEST_PGSQL_SCHEMA='' @@ -656,7 +656,7 @@ trigger: steps: - name: download - image: golang:1.19 + image: golang:1.20 pull: always commands: - timeout -s ABRT 40m make generate-license generate-gitignore @@ -720,7 +720,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.19 + image: golang:1.20 pull: always commands: - make deps-backend @@ -729,7 +729,7 @@ steps: path: /go - name: static - image: techknowlogick/xgo:go-1.19.x + image: techknowlogick/xgo:go-1.20.x pull: always commands: # Upgrade to node 18 once https://github.com/techknowlogick/xgo/issues/163 is resolved @@ -841,7 +841,7 @@ steps: - make deps-frontend - name: deps-backend - image: golang:1.19 + image: golang:1.20 pull: always commands: - make deps-backend @@ -850,7 +850,7 @@ steps: path: /go - name: static - image: techknowlogick/xgo:go-1.19.x + image: techknowlogick/xgo:go-1.20.x pull: always commands: # Upgrade to node 18 once https://github.com/techknowlogick/xgo/issues/163 is resolved @@ -932,7 +932,7 @@ trigger: steps: - name: build-docs - image: golang:1.19 + image: golang:1.20 commands: - cd docs - make trans-copy clean build diff --git a/.golangci.yml b/.golangci.yml index 7635e83a37260..aa04e0a8efd31 100644 --- a/.golangci.yml +++ b/.golangci.yml @@ -28,7 +28,7 @@ linters: fast: false run: - go: 1.19 + go: 1.20 timeout: 10m skip-dirs: - 
node_modules @@ -74,7 +74,7 @@ linters-settings: - name: modifies-value-receiver gofumpt: extra-rules: true - lang-version: "1.19" + lang-version: "1.20" depguard: list-type: denylist # Check the list against standard lib. @@ -84,6 +84,7 @@ linters-settings: - github.com/unknwon/com: "use gitea's util and replacements" - io/ioutil: "use os or io instead" - golang.org/x/exp: "it's experimental and unreliable." + - code.gitea.io/gitea/modules/git/internal: "do not use the internal package, use AddXxx function instead" issues: max-issues-per-linter: 0 diff --git a/Dockerfile b/Dockerfile index 3ee474bb3425b..89f000882c57a 100644 --- a/Dockerfile +++ b/Dockerfile @@ -1,5 +1,5 @@ #Build stage -FROM golang:1.19-alpine3.17 AS build-env +FROM golang:1.20-alpine3.17 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} diff --git a/Dockerfile.rootless b/Dockerfile.rootless index a43a63fa10c78..e92ce857d3cd3 100644 --- a/Dockerfile.rootless +++ b/Dockerfile.rootless @@ -1,5 +1,5 @@ #Build stage -FROM golang:1.19-alpine3.17 AS build-env +FROM golang:1.20-alpine3.17 AS build-env ARG GOPROXY ENV GOPROXY ${GOPROXY:-direct} diff --git a/Makefile b/Makefile index 4d7c507875529..717d7cafc660b 100644 --- a/Makefile +++ b/Makefile @@ -23,13 +23,13 @@ SHASUM ?= shasum -a 256 HAS_GO = $(shell hash $(GO) > /dev/null 2>&1 && echo "GO" || echo "NOGO" ) COMMA := , -XGO_VERSION := go-1.19.x +XGO_VERSION := go-1.20.x AIR_PACKAGE ?= github.com/cosmtrek/air@v1.40.4 EDITORCONFIG_CHECKER_PACKAGE ?= github.com/editorconfig-checker/editorconfig-checker/cmd/editorconfig-checker@2.6.0 ERRCHECK_PACKAGE ?= github.com/kisielk/errcheck@v1.6.2 GOFUMPT_PACKAGE ?= mvdan.cc/gofumpt@v0.4.0 -GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.50.1 +GOLANGCI_LINT_PACKAGE ?= github.com/golangci/golangci-lint/cmd/golangci-lint@v1.51.0 GXZ_PAGAGE ?= github.com/ulikunitz/xz/cmd/gxz@v0.5.10 MISSPELL_PACKAGE ?= github.com/client9/misspell/cmd/misspell@v0.3.4 SWAGGER_PACKAGE ?= github.com/go-swagger/go-swagger/cmd/swagger@v0.30.3 diff --git a/cmd/admin.go b/cmd/admin.go index 8f8c68f9810c9..eafed24bdd6e2 100644 --- a/cmd/admin.go +++ b/cmd/admin.go @@ -578,12 +578,16 @@ func runCreateUser(c *cli.Context) error { restricted = util.OptionalBoolOf(c.Bool("restricted")) } + // default user visibility in app.ini + visibility := setting.Service.DefaultUserVisibilityMode + u := &user_model.User{ Name: username, Email: c.String("email"), Passwd: password, IsAdmin: c.Bool("admin"), MustChangePassword: changePassword, + Visibility: visibility, } overwriteDefault := &user_model.CreateUserOverwriteOptions{ diff --git a/custom/conf/app.example.ini b/custom/conf/app.example.ini index 77efe1417ba96..0f1f18646b627 100644 --- a/custom/conf/app.example.ini +++ b/custom/conf/app.example.ini @@ -765,7 +765,7 @@ ROUTER = console ;; Enable this to require captcha validation for login ;REQUIRE_CAPTCHA_FOR_LOGIN = false ;; -;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha. +;; Type of captcha you want to use. Options: image, recaptcha, hcaptcha, mcaptcha, cfturnstile. 
;CAPTCHA_TYPE = image ;; ;; Change this to use recaptcha.net or other recaptcha service @@ -787,6 +787,10 @@ ROUTER = console ;MCAPTCHA_SECRET = ;MCAPTCHA_SITEKEY = ;; +;; Go to https://dash.cloudflare.com/?to=/:account/turnstile to sign up for a key +;CF_TURNSTILE_SITEKEY = +;CF_TURNSTILE_SECRET = +;; ;; Default value for KeepEmailPrivate ;; Each new user will get the value of this setting copied into their profile ;DEFAULT_KEEP_EMAIL_PRIVATE = false @@ -927,14 +931,18 @@ ROUTER = console ;USE_COMPAT_SSH_URI = false ;; ;; Close issues as long as a commit on any branch marks it as fixed -;; Comma separated list of globally disabled repo units. Allowed values: repo.issues, repo.ext_issues, repo.pulls, repo.wiki, repo.ext_wiki, repo.projects +;; Comma separated list of globally disabled repo units. Allowed values: repo.issues, repo.ext_issues, repo.pulls, repo.wiki, repo.ext_wiki, repo.projects, repo.packages ;DISABLED_REPO_UNITS = ;; -;; Comma separated list of default repo units. Allowed values: repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects. +;; Comma separated list of default new repo units. Allowed values: repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects, repo.packages. ;; Note: Code and Releases can currently not be deactivated. If you specify default repo units you should still list them for future compatibility. ;; External wiki and issue tracker can't be enabled by default as it requires additional settings. ;; Disabled repo units will not be added to new repositories regardless if it is in the default list. -;DEFAULT_REPO_UNITS = repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects +;DEFAULT_REPO_UNITS = repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects,repo.packages +;; +;; Comma separated list of default forked repo units. +;; The set of allowed values and rules are the same as DEFAULT_REPO_UNITS. +;DEFAULT_FORK_REPO_UNITS = repo.code,repo.pulls ;; ;; Prefix archive files by placing them in a directory named after the repository ;PREFIX_ARCHIVE_FILES = true @@ -1218,10 +1226,6 @@ ROUTER = console ;; ;; Whether to enable a Service Worker to cache frontend assets ;USE_SERVICE_WORKER = false -;; -;; Whether to only show relevant repos on the explore page when no keyword is specified and default sorting is used. -;; A repo is considered irrelevant if it's a fork or if it has no metadata (no description, no icon, no topic). 
-;ONLY_SHOW_RELEVANT_REPOS = false ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; ;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;;; @@ -2454,6 +2458,8 @@ ROUTER = console ;LIMIT_TOTAL_OWNER_COUNT = -1 ;; Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) ;LIMIT_TOTAL_OWNER_SIZE = -1 +;; Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) +;LIMIT_SIZE_CARGO = -1 ;; Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) ;LIMIT_SIZE_COMPOSER = -1 ;; Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) diff --git a/docs/config.yaml b/docs/config.yaml index 0b47c0ffd83f5..0a6d5d13f23a9 100644 --- a/docs/config.yaml +++ b/docs/config.yaml @@ -19,8 +19,8 @@ params: author: The Gitea Authors website: https://docs.gitea.io version: 1.18.1 - minGoVersion: 1.18 - goVersion: 1.19 + minGoVersion: 1.19 + goVersion: 1.20 minNodeVersion: 16 search: nav repo: "https://github.com/go-gitea/gitea" diff --git a/docs/content/doc/advanced/config-cheat-sheet.en-us.md b/docs/content/doc/advanced/config-cheat-sheet.en-us.md index 441bb824ad208..c9116edc66594 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.en-us.md +++ b/docs/content/doc/advanced/config-cheat-sheet.en-us.md @@ -104,7 +104,8 @@ In addition there is _`StaticRootPath`_ which can be set as a built-in at build - `ENABLE_PUSH_CREATE_USER`: **false**: Allow users to push local repositories to Gitea and have them automatically created for a user. - `ENABLE_PUSH_CREATE_ORG`: **false**: Allow users to push local repositories to Gitea and have them automatically created for an org. - `DISABLED_REPO_UNITS`: **_empty_**: Comma separated list of globally disabled repo units. Allowed values: \[repo.issues, repo.ext_issues, repo.pulls, repo.wiki, repo.ext_wiki, repo.projects\] -- `DEFAULT_REPO_UNITS`: **repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects**: Comma separated list of default repo units. Allowed values: \[repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects\]. Note: Code and Releases can currently not be deactivated. If you specify default repo units you should still list them for future compatibility. External wiki and issue tracker can't be enabled by default as it requires additional settings. Disabled repo units will not be added to new repositories regardless if it is in the default list. +- `DEFAULT_REPO_UNITS`: **repo.code,repo.releases,repo.issues,repo.pulls,repo.wiki,repo.projects,repo.packages**: Comma separated list of default new repo units. Allowed values: \[repo.code, repo.releases, repo.issues, repo.pulls, repo.wiki, repo.projects, repo.packages\]. Note: Code and Releases can currently not be deactivated. If you specify default repo units you should still list them for future compatibility. External wiki and issue tracker can't be enabled by default as it requires additional settings. Disabled repo units will not be added to new repositories regardless if it is in the default list. +- `DEFAULT_FORK_REPO_UNITS`: **repo.code,repo.pulls**: Comma separated list of default forked repo units. The set of allowed values and rules is the same as `DEFAULT_REPO_UNITS`. - `PREFIX_ARCHIVE_FILES`: **true**: Prefix archive files by placing them in a directory named after the repository. - `DISABLE_MIGRATIONS`: **false**: Disable migrating feature. - `DISABLE_STARS`: **false**: Disable stars feature.
@@ -230,8 +231,6 @@ The following configuration set `Content-Type: application/vnd.android.package-a - `DEFAULT_SHOW_FULL_NAME`: **false**: Whether the full name of the users should be shown where possible. If the full name isn't set, the username will be used. - `SEARCH_REPO_DESCRIPTION`: **true**: Whether to search within description at repository search on explore page. - `USE_SERVICE_WORKER`: **false**: Whether to enable a Service Worker to cache frontend assets. -- `ONLY_SHOW_RELEVANT_REPOS`: **false** Whether to only show relevant repos on the explore page when no keyword is specified and default sorting is used. - A repo is considered irrelevant if it's a fork or if it has no metadata (no description, no icon, no topic). ### UI - Admin (`ui.admin`) @@ -644,7 +643,7 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o - `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: Enable this to require captcha validation for login. You also must enable `ENABLE_CAPTCHA`. - `REQUIRE_EXTERNAL_REGISTRATION_CAPTCHA`: **false**: Enable this to force captcha validation even for External Accounts (i.e. GitHub, OpenID Connect, etc). You also must enable `ENABLE_CAPTCHA`. -- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha\] +- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\] - `RECAPTCHA_SECRET`: **""**: Go to https://www.google.com/recaptcha/admin to get a secret for recaptcha. - `RECAPTCHA_SITEKEY`: **""**: Go to https://www.google.com/recaptcha/admin to get a sitekey for recaptcha. - `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: Set the recaptcha url - allows the use of recaptcha net. @@ -653,6 +652,8 @@ Certain queues have defaults that override the defaults set in `[queue]` (this o - `MCAPTCHA_SECRET`: **""**: Go to your mCaptcha instance to get a secret for mCaptcha. - `MCAPTCHA_SITEKEY`: **""**: Go to your mCaptcha instance to get a sitekey for mCaptcha. - `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: Set the mCaptcha URL. +- `CF_TURNSTILE_SECRET` **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a secret for cloudflare turnstile. +- `CF_TURNSTILE_SITEKEY` **""**: Go to https://dash.cloudflare.com/?to=/:account/turnstile to get a sitekey for cloudflare turnstile. - `DEFAULT_KEEP_EMAIL_PRIVATE`: **false**: By default set users to keep their email address private. - `DEFAULT_ALLOW_CREATE_ORGANIZATION`: **true**: Allow new users to create organizations by default. - `DEFAULT_USER_IS_RESTRICTED`: **false**: Give new users restricted permissions by default @@ -1212,6 +1213,7 @@ Task queue configuration has been moved to `queue.task`. However, the below conf - `CHUNKED_UPLOAD_PATH`: **tmp/package-upload**: Path for chunked uploads. 
Defaults to `APP_DATA_PATH` + `tmp/package-upload` - `LIMIT_TOTAL_OWNER_COUNT`: **-1**: Maximum count of package versions a single owner can have (`-1` means no limits) - `LIMIT_TOTAL_OWNER_SIZE`: **-1**: Maximum size of packages a single owner can use (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) +- `LIMIT_SIZE_CARGO`: **-1**: Maximum size of a Cargo upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) - `LIMIT_SIZE_COMPOSER`: **-1**: Maximum size of a Composer upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) - `LIMIT_SIZE_CONAN`: **-1**: Maximum size of a Conan upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) - `LIMIT_SIZE_CONDA`: **-1**: Maximum size of a Conda upload (`-1` means no limits, format `1000`, `1 MB`, `1 GiB`) diff --git a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md index f10b6258c87a2..2598f16a14963 100644 --- a/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md +++ b/docs/content/doc/advanced/config-cheat-sheet.zh-cn.md @@ -147,6 +147,17 @@ menu: - `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION`: 允许通过反向认证做自动注册。 - `ENABLE_CAPTCHA`: **false**: 注册时使用图片验证码。 - `REQUIRE_CAPTCHA_FOR_LOGIN`: **false**: 登录时需要图片验证码。需要同时开启 `ENABLE_CAPTCHA`。 +- `CAPTCHA_TYPE`: **image**: \[image, recaptcha, hcaptcha, mcaptcha, cfturnstile\],人机验证类型,分别表示图片认证、 recaptcha 、 hcaptcha 、mcaptcha 、和 cloudflare 的 turnstile。 +- `RECAPTCHA_SECRET`: **""**: recaptcha 服务的密钥,可在 https://www.google.com/recaptcha/admin 获取。 +- `RECAPTCHA_SITEKEY`: **""**: recaptcha 服务的网站密钥 ,可在 https://www.google.com/recaptcha/admin 获取。 +- `RECAPTCHA_URL`: **https://www.google.com/recaptcha/**: 设置 recaptcha 的 url 。 +- `HCAPTCHA_SECRET`: **""**: hcaptcha 服务的密钥,可在 https://www.hcaptcha.com/ 获取。 +- `HCAPTCHA_SITEKEY`: **""**: hcaptcha 服务的网站密钥,可在 https://www.hcaptcha.com/ 获取。 +- `MCAPTCHA_SECRET`: **""**: mCaptcha 服务的密钥。 +- `MCAPTCHA_SITEKEY`: **""**: mCaptcha 服务的网站密钥。 +- `MCAPTCHA_URL` **https://demo.mcaptcha.org/**: 设置 mCaptcha 的 url 。 +- `CF_TURNSTILE_SECRET` **""**: cloudflare turnstile 服务的密钥,可在 https://dash.cloudflare.com/?to=/:account/turnstile 获取。 +- `CF_TURNSTILE_SITEKEY` **""**: cloudflare turnstile 服务的网站密钥,可在 https://dash.cloudflare.com/?to=/:account/turnstile 获取。 ### Service - Expore (`service.explore`) diff --git a/docs/content/doc/features/authentication.en-us.md b/docs/content/doc/features/authentication.en-us.md index f25065d9c48db..c27a09b00bf32 100644 --- a/docs/content/doc/features/authentication.en-us.md +++ b/docs/content/doc/features/authentication.en-us.md @@ -329,3 +329,22 @@ Before activating SSPI single sign-on authentication (SSO) you have to prepare y - You have added the URL of the web app to the `Local intranet zone` - The clocks of the server and client should not differ with more than 5 minutes (depends on group policy) - `Integrated Windows Authentication` should be enabled in Internet Explorer (under `Advanced settings`) + +## Reverse Proxy + +Gitea supports Reverse Proxy Header authentication: it reads the configured headers as a trusted login user name or user email address. This is not enabled by default; you can enable it with: + +```ini +[service] +ENABLE_REVERSE_PROXY_AUTHENTICATION = true +``` + +The default login user name is taken from the `X-WEBAUTH-USER` header; you can change the header name by setting `REVERSE_PROXY_AUTHENTICATION_USER` in app.ini. If the user doesn't exist, you can enable automatic registration with `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION=true`.
+ +The default login user email is taken from the `X-WEBAUTH-EMAIL` header; you can change the header name by setting `REVERSE_PROXY_AUTHENTICATION_EMAIL` in app.ini. This header can also be disabled entirely with `ENABLE_REVERSE_PROXY_EMAIL`. + +If `ENABLE_REVERSE_PROXY_FULL_NAME=true` is set, the full name expected in `X-WEBAUTH-FULLNAME` will be assigned to the user when the user is created automatically. You can also change the header name with `REVERSE_PROXY_AUTHENTICATION_FULL_NAME`. + +You can also limit the reverse proxy's IP address range with `REVERSE_PROXY_TRUSTED_PROXIES`, whose default value is `127.0.0.0/8,::1/128`. With `REVERSE_PROXY_LIMIT`, you can limit how many levels of reverse proxies are trusted. + +Notice: Reverse Proxy Auth doesn't support the API. You still need an access token or basic auth to make API requests. diff --git a/docs/content/doc/features/authentication.zh-cn.md b/docs/content/doc/features/authentication.zh-cn.md index 481e33441b5b7..aeb099f760b8e 100644 --- a/docs/content/doc/features/authentication.zh-cn.md +++ b/docs/content/doc/features/authentication.zh-cn.md @@ -15,4 +15,21 @@ menu: # 认证 -## TBD +## 反向代理认证 + +Gitea 支持通过读取反向代理传递的 HTTP 头中的登录名或者 email 地址来支持反向代理来认证。默认是不启用的,你可以用以下配置启用。 + +```ini +[service] +ENABLE_REVERSE_PROXY_AUTHENTICATION = true +``` + +默认的登录用户名的 HTTP 头是 `X-WEBAUTH-USER`,你可以通过修改 `REVERSE_PROXY_AUTHENTICATION_USER` 来变更它。如果用户不存在,可以自动创建用户,当然你需要修改 `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION=true` 来启用它。 + +默认的登录用户 Email 的 HTTP 头是 `X-WEBAUTH-EMAIL`,你可以通过修改 `REVERSE_PROXY_AUTHENTICATION_EMAIL` 来变更它。如果用户不存在,可以自动创建用户,当然你需要修改 `ENABLE_REVERSE_PROXY_AUTO_REGISTRATION=true` 来启用它。你也可以通过修改 `ENABLE_REVERSE_PROXY_EMAIL` 来启用或停用这个 HTTP 头。 + +如果设置了 `ENABLE_REVERSE_PROXY_FULL_NAME=true`,则用户的全名会从 `X-WEBAUTH-FULLNAME` 读取,这样在自动创建用户时将使用这个字段作为用户全名,你也可以通过修改 `REVERSE_PROXY_AUTHENTICATION_FULL_NAME` 来变更 HTTP 头。 + +你也可以通过修改 `REVERSE_PROXY_TRUSTED_PROXIES` 来设置反向代理的IP地址范围,加强安全性,默认值是 `127.0.0.0/8,::1/128`。 通过 `REVERSE_PROXY_LIMIT`, 可以设置最多信任几级反向代理。 + +注意:反向代理认证不支持认证 API,API 仍旧需要用 access token 来进行认证。 diff --git a/docs/content/doc/packages/cargo.en-us.md b/docs/content/doc/packages/cargo.en-us.md new file mode 100644 index 0000000000000..1f90d939d1f7e --- /dev/null +++ b/docs/content/doc/packages/cargo.en-us.md @@ -0,0 +1,109 @@ +--- +date: "2022-11-20T00:00:00+00:00" +title: "Cargo Packages Repository" +slug: "packages/cargo" +draft: false +toc: false +menu: + sidebar: + parent: "packages" + name: "Cargo" + weight: 5 + identifier: "cargo" +--- + +# Cargo Packages Repository + +Publish [Cargo](https://doc.rust-lang.org/stable/cargo/) packages for your user or organization. + +**Table of Contents** + +{{< toc >}} + +## Requirements + +To work with the Cargo package registry, you need [Rust and Cargo](https://www.rust-lang.org/tools/install). + +Cargo stores information about the available packages in a package index stored in a git repository. +This repository is needed to work with the registry. +The following section describes how to create it. + +## Index Repository + +Cargo stores information about the available packages in a package index stored in a git repository. +In Gitea this repository has the special name `_cargo-index`. +After a package is uploaded, its metadata is automatically written to the index. +The content of this repository should not be manually modified. + +The user or organization package settings page allows you to create the index repository along with the configuration file. +If needed, this action will rewrite the configuration file. +This can be useful if, for example, the Gitea instance domain has changed.
+ +If the packages stored in Gitea and the information in the index repository are out of sync, the settings page allows you to rebuild the index repository. +This action iterates over all packages in the registry and writes their information to the index. +If there are a lot of packages, this process may take some time. + +## Configuring the package registry + +To register the package registry, the Cargo configuration must be updated. +Add the following text to the configuration file located in the current user's home directory (for example `~/.cargo/config.toml`): + +``` +[registry] +default = "gitea" + +[registries.gitea] +index = "https://gitea.example.com/{owner}/_cargo-index.git" + +[net] +git-fetch-with-cli = true +``` + +| Parameter | Description | +| --------- | ----------- | +| `owner` | The owner of the package. | + +If the registry is private or you want to publish new packages, you have to configure your credentials. +Add the credentials section to the credentials file located in the current user's home directory (for example `~/.cargo/credentials.toml`): + +``` +[registries.gitea] +token = "Bearer {token}" +``` + +| Parameter | Description | +| --------- | ----------- | +| `token` | Your [personal access token]({{< relref "doc/developers/api-usage.en-us.md#authentication" >}}) | + +## Publish a package + +Publish a package by running the following command in your project: + +```shell +cargo publish +``` + +You cannot publish a package if a package of the same name and version already exists. You must delete the existing package first. + +## Install a package + +To install a package from the package registry, execute the following command: + +```shell +cargo add {package_name} +``` + +| Parameter | Description | +| -------------- | ----------- | +| `package_name` | The package name. | + +## Supported commands + +``` +cargo publish +cargo add +cargo install +cargo yank +cargo unyank +cargo search +``` diff --git a/docs/content/doc/packages/overview.en-us.md b/docs/content/doc/packages/overview.en-us.md index 9a736c1e5641e..b3ccb73c19658 100644 --- a/docs/content/doc/packages/overview.en-us.md +++ b/docs/content/doc/packages/overview.en-us.md @@ -26,6 +26,7 @@ The following package managers are currently supported: | Name | Language | Package client | | ---- | -------- | -------------- | +| [Cargo]({{< relref "doc/packages/cargo.en-us.md" >}}) | Rust | `cargo` | | [Composer]({{< relref "doc/packages/composer.en-us.md" >}}) | PHP | `composer` | | [Conan]({{< relref "doc/packages/conan.en-us.md" >}}) | C++ | `conan` | | [Conda]({{< relref "doc/packages/conda.en-us.md" >}}) | - | `conda` | diff --git a/go.mod b/go.mod index a929508e0de0d..eb23bd9e32a17 100644 --- a/go.mod +++ b/go.mod @@ -1,6 +1,6 @@ module code.gitea.io/gitea -go 1.18 +go 1.19 require ( code.gitea.io/actions-proto-go v0.2.0 diff --git a/models/asymkey/error.go b/models/asymkey/error.go index 1d486082f4610..03bc82302f100 100644 --- a/models/asymkey/error.go +++ b/models/asymkey/error.go @@ -24,6 +24,9 @@ func (err ErrKeyUnableVerify) Error() string { return fmt.Sprintf("Unable to verify key content [result: %s]", err.Result) } +// ErrKeyIsPrivate is returned when the provided key is a private key, not a public key +var ErrKeyIsPrivate = util.NewSilentWrapErrorf(util.ErrInvalidArgument, "the provided key is a private key") + // ErrKeyNotExist represents a "KeyNotExist" kind of error.
type ErrKeyNotExist struct { ID int64 diff --git a/models/asymkey/ssh_key_parse.go b/models/asymkey/ssh_key_parse.go index 1df6db6fa7219..8693c87e76b2d 100644 --- a/models/asymkey/ssh_key_parse.go +++ b/models/asymkey/ssh_key_parse.go @@ -96,6 +96,9 @@ func parseKeyString(content string) (string, error) { if block == nil { return "", fmt.Errorf("failed to parse PEM block containing the public key") } + if strings.Contains(block.Type, "PRIVATE") { + return "", ErrKeyIsPrivate + } pub, err := x509.ParsePKIXPublicKey(block.Bytes) if err != nil { diff --git a/models/db/search.go b/models/db/search.go index f5273cb6f6bfa..26e082756a51b 100644 --- a/models/db/search.go +++ b/models/db/search.go @@ -27,3 +27,9 @@ const ( SearchOrderByForks SearchOrderBy = "num_forks ASC" SearchOrderByForksReverse SearchOrderBy = "num_forks DESC" ) + +const ( + // Which means a condition to filter the records which don't match any id. + // It's different from zero which means the condition could be ignored. + NoneID = -1 +) diff --git a/models/db/sql_postgres_with_schema.go b/models/db/sql_postgres_with_schema.go index ec63447f6f6f3..c2694b37bb934 100644 --- a/models/db/sql_postgres_with_schema.go +++ b/models/db/sql_postgres_with_schema.go @@ -37,9 +37,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) { } schemaValue, _ := driver.String.ConvertValue(setting.Database.Schema) - // golangci lint is incorrect here - there is no benefit to using driver.ExecerContext here - // and in any case pq does not implement it - if execer, ok := conn.(driver.Execer); ok { //nolint + if execer, ok := conn.(driver.Execer); ok { _, err := execer.Exec(`SELECT set_config( 'search_path', $1 || ',' || current_setting('search_path'), @@ -63,8 +61,7 @@ func (d *postgresSchemaDriver) Open(name string) (driver.Conn, error) { // driver.String.ConvertValue will never return err for string - // golangci lint is incorrect here - there is no benefit to using stmt.ExecWithContext here - _, err = stmt.Exec([]driver.Value{schemaValue}) //nolint + _, err = stmt.Exec([]driver.Value{schemaValue}) if err != nil { _ = conn.Close() return nil, err diff --git a/models/dbfs/dbfs.go b/models/dbfs/dbfs.go index 89d3dc7cbc911..6b5b3beeb2745 100644 --- a/models/dbfs/dbfs.go +++ b/models/dbfs/dbfs.go @@ -10,6 +10,35 @@ import ( "code.gitea.io/gitea/models/db" ) +/* +The reasons behind the DBFS (database-filesystem) package: +When a Gitea action is running, the Gitea action server should collect and store all the logs. + +The requirements are: +* The running logs must be stored across the cluster if the Gitea servers are deployed as a cluster. +* The logs will be archived to Object Storage (S3/MinIO, etc.) after a period of time. +* The Gitea action UI should be able to render the running logs and the archived logs. + +Some possible solutions for the running logs: +* [Not ideal] Using local temp file: it can not be shared across the cluster. +* [Not ideal] Using shared file in the filesystem of git repository: although at the moment, the Gitea cluster's + git repositories must be stored in a shared filesystem, in the future, Gitea may need a dedicated Git Service Server + to decouple the shared filesystem. Then the action logs will become a blocker. +* [Not ideal] Record the logs in a database table line by line: it has a couple of problems: + - It's difficult to make multiple increasing sequence (log line number) for different databases. 
+ - The database table will have a lot of rows and be affected by the big-table performance problem. + - It's difficult to load logs by using the same interface as other storages. + - It's difficult to calculate the size of the logs. + +The DBFS solution: +* It can be used in a cluster. +* It can share the same interface (Read/Write/Seek) as other storages. +* It's very friendly to database because it only needs to store much fewer rows than the log-line solution. +* In the future, when Gitea action needs to limit the log size (other CI/CD services also do so), it's easier to calculate the log file size. +* Even sometimes the UI needs to render the tailing lines, the tailing lines can be found be counting the "\n" from the end of the file by seek. + The seeking and finding is not the fastest way, but it's still acceptable and won't affect the performance too much. +*/ + type dbfsMeta struct { ID int64 `xorm:"pk autoincr"` FullPath string `xorm:"VARCHAR(500) UNIQUE NOT NULL"` diff --git a/models/fixtures/repository.yml b/models/fixtures/repository.yml index 5d4781ad44f7c..ef3cfbbbec7c5 100644 --- a/models/fixtures/repository.yml +++ b/models/fixtures/repository.yml @@ -4,6 +4,7 @@ owner_name: user2 lower_name: repo1 name: repo1 + default_branch: master num_watches: 4 num_stars: 0 num_forks: 0 @@ -34,6 +35,7 @@ owner_name: user2 lower_name: repo2 name: repo2 + default_branch: master num_watches: 0 num_stars: 1 num_forks: 0 @@ -64,6 +66,7 @@ owner_name: user3 lower_name: repo3 name: repo3 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -94,6 +97,7 @@ owner_name: user5 lower_name: repo4 name: repo4 + default_branch: master num_watches: 0 num_stars: 1 num_forks: 0 @@ -274,6 +278,7 @@ owner_name: user12 lower_name: repo10 name: repo10 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 1 @@ -304,6 +309,7 @@ owner_name: user13 lower_name: repo11 name: repo11 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -425,6 +431,7 @@ owner_name: user2 lower_name: repo15 name: repo15 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -455,6 +462,7 @@ owner_name: user2 lower_name: repo16 name: repo16 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -905,6 +913,7 @@ owner_name: user2 lower_name: repo20 name: repo20 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -965,6 +974,7 @@ owner_name: user2 lower_name: utf8 name: utf8 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1055,6 +1065,7 @@ owner_name: user2 lower_name: commits_search_test name: commits_search_test + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1085,6 +1096,7 @@ owner_name: user2 lower_name: git_hooks_test name: git_hooks_test + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1115,6 +1127,7 @@ owner_name: limited_org lower_name: public_repo_on_limited_org name: public_repo_on_limited_org + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1145,6 +1158,7 @@ owner_name: limited_org lower_name: private_repo_on_limited_org name: private_repo_on_limited_org + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1175,6 +1189,7 @@ owner_name: privated_org lower_name: public_repo_on_private_org name: public_repo_on_private_org + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1205,6 +1220,7 @@ owner_name: privated_org lower_name: private_repo_on_private_org name: private_repo_on_private_org + default_branch: master num_watches: 
0 num_stars: 0 num_forks: 0 @@ -1235,6 +1251,7 @@ owner_name: user2 lower_name: glob name: glob + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1295,6 +1312,7 @@ owner_name: user27 lower_name: template1 name: template1 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1355,6 +1373,7 @@ owner_name: org26 lower_name: repo_external_tracker name: repo_external_tracker + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1385,6 +1404,7 @@ owner_name: org26 lower_name: repo_external_tracker_numeric name: repo_external_tracker_numeric + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1415,6 +1435,7 @@ owner_name: org26 lower_name: repo_external_tracker_alpha name: repo_external_tracker_alpha + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1445,6 +1466,7 @@ owner_name: user27 lower_name: repo49 name: repo49 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1475,6 +1497,7 @@ owner_name: user30 lower_name: repo50 name: repo50 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1505,6 +1528,7 @@ owner_name: user30 lower_name: repo51 name: repo51 + default_branch: master num_watches: 0 num_stars: 0 num_forks: 0 @@ -1565,6 +1589,7 @@ owner_name: user30 lower_name: renderer name: renderer + default_branch: master is_archived: false is_empty: false is_private: false @@ -1592,6 +1617,7 @@ owner_name: user2 lower_name: lfs name: lfs + default_branch: master is_empty: false is_archived: false is_private: true diff --git a/models/issues/issue.go b/models/issues/issue.go index 78cac900523f9..3ddc799270962 100644 --- a/models/issues/issue.go +++ b/models/issues/issue.go @@ -1251,6 +1251,8 @@ func (opts *IssuesOptions) setupSessionNoLimit(sess *xorm.Session) { if opts.ProjectID > 0 { sess.Join("INNER", "project_issue", "issue.id = project_issue.issue_id"). 
And("project_issue.project_id=?", opts.ProjectID) + } else if opts.ProjectID == db.NoneID { // show those that are in no project + sess.And(builder.NotIn("issue.id", builder.Select("issue_id").From("project_issue"))) } if opts.ProjectBoardID != 0 { diff --git a/models/issues/pull.go b/models/issues/pull.go index 6ff6502e4eb56..044fb5fa04d65 100644 --- a/models/issues/pull.go +++ b/models/issues/pull.go @@ -8,6 +8,7 @@ import ( "context" "fmt" "io" + "strconv" "strings" "code.gitea.io/gitea/models/db" @@ -132,6 +133,27 @@ const ( PullRequestStatusAncestor ) +func (status PullRequestStatus) String() string { + switch status { + case PullRequestStatusConflict: + return "CONFLICT" + case PullRequestStatusChecking: + return "CHECKING" + case PullRequestStatusMergeable: + return "MERGEABLE" + case PullRequestStatusManuallyMerged: + return "MANUALLY_MERGED" + case PullRequestStatusError: + return "ERROR" + case PullRequestStatusEmpty: + return "EMPTY" + case PullRequestStatusAncestor: + return "ANCESTOR" + default: + return strconv.Itoa(int(status)) + } +} + // PullRequestFlow the flow of pull request type PullRequestFlow int @@ -202,6 +224,42 @@ func DeletePullsByBaseRepoID(ctx context.Context, repoID int64) error { return err } +// ColorFormat writes a colored string to identify this struct +func (pr *PullRequest) ColorFormat(s fmt.State) { + if pr == nil { + log.ColorFprintf(s, "PR[%d]%s#%d[%s...%s:%s]", + log.NewColoredIDValue(0), + log.NewColoredValue("/"), + log.NewColoredIDValue(0), + log.NewColoredValue(""), + log.NewColoredValue("/"), + log.NewColoredValue(""), + ) + return + } + + log.ColorFprintf(s, "PR[%d]", log.NewColoredIDValue(pr.ID)) + if pr.BaseRepo != nil { + log.ColorFprintf(s, "%s#%d[%s...", log.NewColoredValue(pr.BaseRepo.FullName()), + log.NewColoredIDValue(pr.Index), log.NewColoredValue(pr.BaseBranch)) + } else { + log.ColorFprintf(s, "Repo[%d]#%d[%s...", log.NewColoredIDValue(pr.BaseRepoID), + log.NewColoredIDValue(pr.Index), log.NewColoredValue(pr.BaseBranch)) + } + if pr.HeadRepoID == pr.BaseRepoID { + log.ColorFprintf(s, "%s]", log.NewColoredValue(pr.HeadBranch)) + } else if pr.HeadRepo != nil { + log.ColorFprintf(s, "%s:%s]", log.NewColoredValue(pr.HeadRepo.FullName()), log.NewColoredValue(pr.HeadBranch)) + } else { + log.ColorFprintf(s, "Repo[%d]:%s]", log.NewColoredIDValue(pr.HeadRepoID), log.NewColoredValue(pr.HeadBranch)) + } +} + +// String represents the pr as a simple string +func (pr *PullRequest) String() string { + return log.ColorFormatAsString(pr) +} + // MustHeadUserName returns the HeadRepo's username if failed return blank func (pr *PullRequest) MustHeadUserName(ctx context.Context) string { if err := pr.LoadHeadRepo(ctx); err != nil { @@ -234,7 +292,8 @@ func (pr *PullRequest) LoadAttributes(ctx context.Context) (err error) { return nil } -// LoadHeadRepo loads the head repository +// LoadHeadRepo loads the head repository, pr.HeadRepo will remain nil if it does not exist +// and thus ErrRepoNotExist will never be returned func (pr *PullRequest) LoadHeadRepo(ctx context.Context) (err error) { if !pr.isHeadRepoLoaded && pr.HeadRepo == nil && pr.HeadRepoID > 0 { if pr.HeadRepoID == pr.BaseRepoID { @@ -249,14 +308,14 @@ func (pr *PullRequest) LoadHeadRepo(ctx context.Context) (err error) { pr.HeadRepo, err = repo_model.GetRepositoryByID(ctx, pr.HeadRepoID) if err != nil && !repo_model.IsErrRepoNotExist(err) { // Head repo maybe deleted, but it should still work - return fmt.Errorf("GetRepositoryByID(head): %w", err) + return 
fmt.Errorf("pr[%d].LoadHeadRepo[%d]: %w", pr.ID, pr.HeadRepoID, err) } pr.isHeadRepoLoaded = true } return nil } -// LoadBaseRepo loads the target repository +// LoadBaseRepo loads the target repository. ErrRepoNotExist may be returned. func (pr *PullRequest) LoadBaseRepo(ctx context.Context) (err error) { if pr.BaseRepo != nil { return nil @@ -274,7 +333,7 @@ func (pr *PullRequest) LoadBaseRepo(ctx context.Context) (err error) { pr.BaseRepo, err = repo_model.GetRepositoryByID(ctx, pr.BaseRepoID) if err != nil { - return fmt.Errorf("repo_model.GetRepositoryByID(base): %w", err) + return fmt.Errorf("pr[%d].LoadBaseRepo[%d]: %w", pr.ID, pr.BaseRepoID, err) } return nil } diff --git a/models/packages/descriptor.go b/models/packages/descriptor.go index 3b36ee22661ca..40010eb720cbe 100644 --- a/models/packages/descriptor.go +++ b/models/packages/descriptor.go @@ -11,6 +11,7 @@ import ( repo_model "code.gitea.io/gitea/models/repo" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/packages/cargo" "code.gitea.io/gitea/modules/packages/composer" "code.gitea.io/gitea/modules/packages/conan" "code.gitea.io/gitea/modules/packages/conda" @@ -129,6 +130,8 @@ func GetPackageDescriptor(ctx context.Context, pv *PackageVersion) (*PackageDesc var metadata interface{} switch p.Type { + case TypeCargo: + metadata = &cargo.Metadata{} case TypeComposer: metadata = &composer.Metadata{} case TypeConan: diff --git a/models/packages/package.go b/models/packages/package.go index 0015953d81d8c..b6b033cc95a69 100644 --- a/models/packages/package.go +++ b/models/packages/package.go @@ -30,6 +30,7 @@ type Type string // List of supported packages const ( + TypeCargo Type = "cargo" TypeComposer Type = "composer" TypeConan Type = "conan" TypeConda Type = "conda" @@ -46,6 +47,7 @@ const ( ) var TypeList = []Type{ + TypeCargo, TypeComposer, TypeConan, TypeConda, @@ -64,6 +66,8 @@ var TypeList = []Type{ // Name gets the name of the package type func (pt Type) Name() string { switch pt { + case TypeCargo: + return "Cargo" case TypeComposer: return "Composer" case TypeConan: @@ -97,6 +101,8 @@ func (pt Type) Name() string { // SVGName gets the name of the package type svg image func (pt Type) SVGName() string { switch pt { + case TypeCargo: + return "gitea-cargo" case TypeComposer: return "gitea-composer" case TypeConan: diff --git a/models/packages/package_property.go b/models/packages/package_property.go index 1b7f253d56637..e03b12c9df4dd 100644 --- a/models/packages/package_property.go +++ b/models/packages/package_property.go @@ -58,6 +58,12 @@ func GetPropertiesByName(ctx context.Context, refType PropertyType, refID int64, return pps, db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ? AND name = ?", refType, refID, name).Find(&pps) } +// UpdateProperty updates a property +func UpdateProperty(ctx context.Context, pp *PackageProperty) error { + _, err := db.GetEngine(ctx).ID(pp.ID).Update(pp) + return err +} + // DeleteAllProperties deletes all properties of a ref func DeleteAllProperties(ctx context.Context, refType PropertyType, refID int64) error { _, err := db.GetEngine(ctx).Where("ref_type = ? AND ref_id = ?", refType, refID).Delete(&PackageProperty{}) diff --git a/models/repo/repo.go b/models/repo/repo.go index 831eb22dc5a50..06ec34ed631aa 100644 --- a/models/repo/repo.go +++ b/models/repo/repo.go @@ -227,11 +227,6 @@ func (repo *Repository) IsBroken() bool { // AfterLoad is invoked from XORM after setting the values of all fields of this object. 
func (repo *Repository) AfterLoad() { - // FIXME: use models migration to solve all at once. - if len(repo.DefaultBranch) == 0 { - repo.DefaultBranch = setting.Repository.DefaultBranch - } - repo.NumOpenIssues = repo.NumIssues - repo.NumClosedIssues repo.NumOpenPulls = repo.NumPulls - repo.NumClosedPulls repo.NumOpenMilestones = repo.NumMilestones - repo.NumClosedMilestones diff --git a/models/repo/repo_list.go b/models/repo/repo_list.go index c6e9a204d1330..d64368daa6b28 100644 --- a/models/repo/repo_list.go +++ b/models/repo/repo_list.go @@ -494,7 +494,7 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { } if opts.OnlyShowRelevant { - // Only show a repo that either has a topic or description. + // Only show a repo that has at least a topic, an icon, or a description subQueryCond := builder.NewCond() // Topic checking. Topics are present. @@ -504,13 +504,13 @@ func SearchRepositoryCondition(opts *SearchRepoOptions) builder.Cond { subQueryCond = subQueryCond.Or(builder.And(builder.Neq{"topics": "null"}, builder.Neq{"topics": "[]"})) } - // Description checking. Description not empty. + // Description checking. Description not empty subQueryCond = subQueryCond.Or(builder.Neq{"description": ""}) - // Repo has a avatar. + // Repo has a avatar subQueryCond = subQueryCond.Or(builder.Neq{"avatar": ""}) - // Always hide repo's that are empty. + // Always hide repo's that are empty subQueryCond = subQueryCond.And(builder.Eq{"is_empty": false}) cond = cond.And(subQueryCond) diff --git a/models/unit/unit.go b/models/unit/unit.go index bcd0572ab9ecf..a2a9079dc25c7 100644 --- a/models/unit/unit.go +++ b/models/unit/unit.go @@ -94,6 +94,12 @@ var ( TypePackages, } + // ForkRepoUnits contains the default unit types for forks + DefaultForkRepoUnits = []Type{ + TypeCode, + TypePullRequests, + } + // NotAllowedDefaultRepoUnits contains units that can't be default NotAllowedDefaultRepoUnits = []Type{ TypeExternalWiki, @@ -110,26 +116,41 @@ var ( DisabledRepoUnits = []Type{} ) -// LoadUnitConfig load units from settings -func LoadUnitConfig() { - setDefaultRepoUnits := FindUnitTypes(setting.Repository.DefaultRepoUnits...) - // Default repo units set if setting is not empty - if len(setDefaultRepoUnits) > 0 { +// Get valid set of default repository units from settings +func validateDefaultRepoUnits(defaultUnits, settingDefaultUnits []Type) []Type { + units := defaultUnits + + // Use setting if not empty + if len(settingDefaultUnits) > 0 { // MustRepoUnits required as default - DefaultRepoUnits = make([]Type, len(MustRepoUnits)) - copy(DefaultRepoUnits, MustRepoUnits) - for _, defaultU := range setDefaultRepoUnits { - if !defaultU.CanBeDefault() { - log.Warn("Not allowed as default unit: %s", defaultU.String()) + units = make([]Type, len(MustRepoUnits)) + copy(units, MustRepoUnits) + for _, settingUnit := range settingDefaultUnits { + if !settingUnit.CanBeDefault() { + log.Warn("Not allowed as default unit: %s", settingUnit.String()) continue } // MustRepoUnits already added - if defaultU.CanDisable() { - DefaultRepoUnits = append(DefaultRepoUnits, defaultU) + if settingUnit.CanDisable() { + units = append(units, settingUnit) } } } + // Remove disabled units + for _, disabledUnit := range DisabledRepoUnits { + for i, unit := range units { + if unit == disabledUnit { + units = append(units[:i], units[i+1:]...) + } + } + } + + return units +} + +// LoadUnitConfig load units from settings +func LoadUnitConfig() { DisabledRepoUnits = FindUnitTypes(setting.Repository.DisabledRepoUnits...) 
// Check that must units are not disabled for i, disabledU := range DisabledRepoUnits { @@ -138,14 +159,11 @@ func LoadUnitConfig() { DisabledRepoUnits = append(DisabledRepoUnits[:i], DisabledRepoUnits[i+1:]...) } } - // Remove disabled units from default units - for _, disabledU := range DisabledRepoUnits { - for i, defaultU := range DefaultRepoUnits { - if defaultU == disabledU { - DefaultRepoUnits = append(DefaultRepoUnits[:i], DefaultRepoUnits[i+1:]...) - } - } - } + + setDefaultRepoUnits := FindUnitTypes(setting.Repository.DefaultRepoUnits...) + DefaultRepoUnits = validateDefaultRepoUnits(DefaultRepoUnits, setDefaultRepoUnits) + setDefaultForkRepoUnits := FindUnitTypes(setting.Repository.DefaultForkRepoUnits...) + DefaultForkRepoUnits = validateDefaultRepoUnits(DefaultForkRepoUnits, setDefaultForkRepoUnits) } // UnitGlobalDisabled checks if unit type is global disabled diff --git a/modules/actions/workflows.go b/modules/actions/workflows.go index c1c8e71f53b5c..7f0e6e456436b 100644 --- a/modules/actions/workflows.go +++ b/modules/actions/workflows.go @@ -75,7 +75,6 @@ func DetectWorkflows(commit *git.Commit, triggedEvent webhook_module.HookEventTy if evt.Name != triggedEvent.Event() { continue } - if detectMatched(commit, triggedEvent, payload, evt) { workflows[entry.Name()] = content } @@ -105,8 +104,9 @@ func detectMatched(commit *git.Commit, triggedEvent webhook_module.HookEventType for cond, vals := range evt.Acts { switch cond { case "branches", "tags": + refShortName := git.RefName(pushPayload.Ref).ShortName() for _, val := range vals { - if glob.MustCompile(val, '/').Match(pushPayload.Ref) { + if glob.MustCompile(val, '/').Match(refShortName) { matchTimes++ break } @@ -160,8 +160,9 @@ func detectMatched(commit *git.Commit, triggedEvent webhook_module.HookEventType } } case "branches": + refShortName := git.RefName(prPayload.PullRequest.Base.Ref).ShortName() for _, val := range vals { - if glob.MustCompile(val, '/').Match(prPayload.PullRequest.Base.Ref) { + if glob.MustCompile(val, '/').Match(refShortName) { matchTimes++ break } diff --git a/modules/charset/escape.go b/modules/charset/escape.go index 3b1c20697793b..5608836a4510e 100644 --- a/modules/charset/escape.go +++ b/modules/charset/escape.go @@ -44,7 +44,7 @@ func EscapeControlReader(reader io.Reader, writer io.Writer, locale translation. return streamer.escaped, err } -// EscapeControlStringReader escapes the unicode control sequences in a provided reader of string content and writer in a locale and returns the findings as an EscapeStatus and the escaped []byte +// EscapeControlStringReader escapes the unicode control sequences in a provided reader of string content and writer in a locale and returns the findings as an EscapeStatus and the escaped []byte. HTML line breaks are not inserted after every newline by this method. 
func EscapeControlStringReader(reader io.Reader, writer io.Writer, locale translation.Locale, allowed ...rune) (escaped *EscapeStatus, err error) { bufRd := bufio.NewReader(reader) outputStream := &HTMLStreamerWriter{Writer: writer} @@ -65,10 +65,6 @@ func EscapeControlStringReader(reader io.Reader, writer io.Writer, locale transl } break } - if err := streamer.SelfClosingTag("br"); err != nil { - streamer.escaped.HasError = true - return streamer.escaped, err - } } return streamer.escaped, err } diff --git a/modules/context/captcha.go b/modules/context/captcha.go index 735613504caee..07232e9390663 100644 --- a/modules/context/captcha.go +++ b/modules/context/captcha.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/mcaptcha" "code.gitea.io/gitea/modules/recaptcha" "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/turnstile" "gitea.com/go-chi/captcha" ) @@ -47,12 +48,14 @@ func SetCaptchaData(ctx *Context) { ctx.Data["HcaptchaSitekey"] = setting.Service.HcaptchaSitekey ctx.Data["McaptchaSitekey"] = setting.Service.McaptchaSitekey ctx.Data["McaptchaURL"] = setting.Service.McaptchaURL + ctx.Data["CfTurnstileSitekey"] = setting.Service.CfTurnstileSitekey } const ( - gRecaptchaResponseField = "g-recaptcha-response" - hCaptchaResponseField = "h-captcha-response" - mCaptchaResponseField = "m-captcha-response" + gRecaptchaResponseField = "g-recaptcha-response" + hCaptchaResponseField = "h-captcha-response" + mCaptchaResponseField = "m-captcha-response" + cfTurnstileResponseField = "cf-turnstile-response" ) // VerifyCaptcha verifies Captcha data @@ -73,6 +76,8 @@ func VerifyCaptcha(ctx *Context, tpl base.TplName, form interface{}) { valid, err = hcaptcha.Verify(ctx, ctx.Req.Form.Get(hCaptchaResponseField)) case setting.MCaptcha: valid, err = mcaptcha.Verify(ctx, ctx.Req.Form.Get(mCaptchaResponseField)) + case setting.CfTurnstile: + valid, err = turnstile.Verify(ctx, ctx.Req.Form.Get(cfTurnstileResponseField)) default: ctx.ServerError("Unknown Captcha Type", fmt.Errorf("Unknown Captcha Type: %s", setting.Service.CaptchaType)) return diff --git a/modules/git/command.go b/modules/git/command.go index d88fcd1a8c807..0bc8103116a0b 100644 --- a/modules/git/command.go +++ b/modules/git/command.go @@ -16,14 +16,20 @@ import ( "time" "unsafe" + "code.gitea.io/gitea/modules/git/internal" //nolint:depguard // only this file can use the internal type CmdArg, other files and packages should use AddXxx functions "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/util" ) +// TrustedCmdArgs returns the trusted arguments for git command. +// It's mainly for passing user-provided and trusted arguments to git command +// In most cases, it shouldn't be used. Use AddXxx function instead +type TrustedCmdArgs []internal.CmdArg + var ( // globalCommandArgs global command args for external package setting - globalCommandArgs []CmdArg + globalCommandArgs TrustedCmdArgs // defaultCommandExecutionTimeout default command execution timeout duration defaultCommandExecutionTimeout = 360 * time.Second @@ -42,8 +48,6 @@ type Command struct { brokenArgs []string } -type CmdArg string - func (c *Command) String() string { if len(c.args) == 0 { return c.name @@ -53,7 +57,7 @@ func (c *Command) String() string { // NewCommand creates and returns a new Git Command based on given command and arguments. // Each argument should be safe to be trusted. User-provided arguments should be passed to AddDynamicArguments instead. 
-func NewCommand(ctx context.Context, args ...CmdArg) *Command { +func NewCommand(ctx context.Context, args ...internal.CmdArg) *Command { // Make an explicit copy of globalCommandArgs, otherwise append might overwrite it cargs := make([]string, 0, len(globalCommandArgs)+len(args)) for _, arg := range globalCommandArgs { @@ -70,15 +74,9 @@ func NewCommand(ctx context.Context, args ...CmdArg) *Command { } } -// NewCommandNoGlobals creates and returns a new Git Command based on given command and arguments only with the specify args and don't care global command args -// Each argument should be safe to be trusted. User-provided arguments should be passed to AddDynamicArguments instead. -func NewCommandNoGlobals(args ...CmdArg) *Command { - return NewCommandContextNoGlobals(DefaultContext, args...) -} - // NewCommandContextNoGlobals creates and returns a new Git Command based on given command and arguments only with the specify args and don't care global command args // Each argument should be safe to be trusted. User-provided arguments should be passed to AddDynamicArguments instead. -func NewCommandContextNoGlobals(ctx context.Context, args ...CmdArg) *Command { +func NewCommandContextNoGlobals(ctx context.Context, args ...internal.CmdArg) *Command { cargs := make([]string, 0, len(args)) for _, arg := range args { cargs = append(cargs, string(arg)) @@ -96,27 +94,70 @@ func (c *Command) SetParentContext(ctx context.Context) *Command { return c } -// SetDescription sets the description for this command which be returned on -// c.String() +// SetDescription sets the description for this command which be returned on c.String() func (c *Command) SetDescription(desc string) *Command { c.desc = desc return c } -// AddArguments adds new git argument(s) to the command. Each argument must be safe to be trusted. -// User-provided arguments should be passed to AddDynamicArguments instead. -func (c *Command) AddArguments(args ...CmdArg) *Command { +// isSafeArgumentValue checks if the argument is safe to be used as a value (not an option) +func isSafeArgumentValue(s string) bool { + return s == "" || s[0] != '-' +} + +// isValidArgumentOption checks if the argument is a valid option (starting with '-'). +// It doesn't check whether the option is supported or not +func isValidArgumentOption(s string) bool { + return s != "" && s[0] == '-' +} + +// AddArguments adds new git arguments (option/value) to the command. It only accepts string literals, or trusted CmdArg. +// Type CmdArg is in the internal package, so it can not be used outside of this package directly, +// it makes sure that user-provided arguments won't cause RCE risks. +// User-provided arguments should be passed by other AddXxx functions +func (c *Command) AddArguments(args ...internal.CmdArg) *Command { for _, arg := range args { c.args = append(c.args, string(arg)) } return c } -// AddDynamicArguments adds new dynamic argument(s) to the command. -// The arguments may come from user input and can not be trusted, so no leading '-' is allowed to avoid passing options +// AddOptionValues adds a new option with a list of non-option values +// For example: AddOptionValues("--opt", val) means 2 arguments: {"--opt", val}. +// The values are treated as dynamic argument values. It equals to: AddArguments("--opt") then AddDynamicArguments(val). 
+func (c *Command) AddOptionValues(opt internal.CmdArg, args ...string) *Command { + if !isValidArgumentOption(string(opt)) { + c.brokenArgs = append(c.brokenArgs, string(opt)) + return c + } + c.args = append(c.args, string(opt)) + c.AddDynamicArguments(args...) + return c +} + +// AddOptionFormat adds a new option with a format string and arguments +// For example: AddOptionFormat("--opt=%s %s", val1, val2) means 1 argument: {"--opt=val1 val2"}. +func (c *Command) AddOptionFormat(opt string, args ...any) *Command { + if !isValidArgumentOption(opt) { + c.brokenArgs = append(c.brokenArgs, opt) + return c + } + // a quick check to make sure the format string matches the number of arguments, to find low-level mistakes ASAP + if strings.Count(strings.ReplaceAll(opt, "%%", ""), "%") != len(args) { + c.brokenArgs = append(c.brokenArgs, opt) + return c + } + s := fmt.Sprintf(opt, args...) + c.args = append(c.args, s) + return c +} + +// AddDynamicArguments adds new dynamic argument values to the command. +// The arguments may come from user input and can not be trusted, so no leading '-' is allowed to avoid passing options. +// TODO: in the future, this function can be renamed to AddArgumentValues func (c *Command) AddDynamicArguments(args ...string) *Command { for _, arg := range args { - if arg != "" && arg[0] == '-' { + if !isSafeArgumentValue(arg) { c.brokenArgs = append(c.brokenArgs, arg) } } @@ -137,14 +178,14 @@ func (c *Command) AddDashesAndList(list ...string) *Command { return c } -// CmdArgCheck checks whether the string is safe to be used as a dynamic argument. -// It panics if the check fails. Usually it should not be used, it's just for refactoring purpose -// deprecated -func CmdArgCheck(s string) CmdArg { - if s != "" && s[0] == '-' { - panic("invalid git cmd argument: " + s) +// ToTrustedCmdArgs converts a list of strings (trusted as argument) to TrustedCmdArgs +// In most cases, it shouldn't be used. Use AddXxx function instead +func ToTrustedCmdArgs(args []string) TrustedCmdArgs { + ret := make(TrustedCmdArgs, len(args)) + for i, arg := range args { + ret[i] = internal.CmdArg(arg) } - return CmdArg(s) + return ret } // RunOpts represents parameters to run the command. If UseContextTimeout is specified, then Timeout is ignored. 
@@ -364,9 +405,9 @@ func (c *Command) RunStdBytes(opts *RunOpts) (stdout, stderr []byte, runErr RunS } // AllowLFSFiltersArgs return globalCommandArgs with lfs filter, it should only be used for tests -func AllowLFSFiltersArgs() []CmdArg { +func AllowLFSFiltersArgs() TrustedCmdArgs { // Now here we should explicitly allow lfs filters to run - filteredLFSGlobalArgs := make([]CmdArg, len(globalCommandArgs)) + filteredLFSGlobalArgs := make(TrustedCmdArgs, len(globalCommandArgs)) j := 0 for _, arg := range globalCommandArgs { if strings.Contains(string(arg), "lfs") { diff --git a/modules/git/command_test.go b/modules/git/command_test.go index 2dca2d0d344d0..4e5f991d31125 100644 --- a/modules/git/command_test.go +++ b/modules/git/command_test.go @@ -41,3 +41,14 @@ func TestRunWithContextStd(t *testing.T) { assert.Empty(t, stderr) assert.Contains(t, stdout, "git version") } + +func TestGitArgument(t *testing.T) { + assert.True(t, isValidArgumentOption("-x")) + assert.True(t, isValidArgumentOption("--xx")) + assert.False(t, isValidArgumentOption("")) + assert.False(t, isValidArgumentOption("x")) + + assert.True(t, isSafeArgumentValue("")) + assert.True(t, isSafeArgumentValue("x")) + assert.False(t, isSafeArgumentValue("-x")) +} diff --git a/modules/git/commit.go b/modules/git/commit.go index 14710de612153..1f6289ed02224 100644 --- a/modules/git/commit.go +++ b/modules/git/commit.go @@ -9,7 +9,6 @@ import ( "bytes" "context" "errors" - "fmt" "io" "os/exec" "strconv" @@ -91,8 +90,8 @@ func AddChanges(repoPath string, all bool, files ...string) error { } // AddChangesWithArgs marks local changes to be ready for commit. -func AddChangesWithArgs(repoPath string, globalArgs []CmdArg, all bool, files ...string) error { - cmd := NewCommandNoGlobals(append(globalArgs, "add")...) +func AddChangesWithArgs(repoPath string, globalArgs TrustedCmdArgs, all bool, files ...string) error { + cmd := NewCommandContextNoGlobals(DefaultContext, globalArgs...).AddArguments("add") if all { cmd.AddArguments("--all") } @@ -111,17 +110,18 @@ type CommitChangesOptions struct { // CommitChanges commits local changes with given committer, author and message. // If author is nil, it will be the same as committer. func CommitChanges(repoPath string, opts CommitChangesOptions) error { - cargs := make([]CmdArg, len(globalCommandArgs)) + cargs := make(TrustedCmdArgs, len(globalCommandArgs)) copy(cargs, globalCommandArgs) return CommitChangesWithArgs(repoPath, cargs, opts) } // CommitChangesWithArgs commits local changes with given committer, author and message. // If author is nil, it will be the same as committer. -func CommitChangesWithArgs(repoPath string, args []CmdArg, opts CommitChangesOptions) error { - cmd := NewCommandNoGlobals(args...) +func CommitChangesWithArgs(repoPath string, args TrustedCmdArgs, opts CommitChangesOptions) error { + cmd := NewCommandContextNoGlobals(DefaultContext, args...) 
if opts.Committer != nil { - cmd.AddArguments("-c", CmdArg("user.name="+opts.Committer.Name), "-c", CmdArg("user.email="+opts.Committer.Email)) + cmd.AddOptionValues("-c", "user.name="+opts.Committer.Name) + cmd.AddOptionValues("-c", "user.email="+opts.Committer.Email) } cmd.AddArguments("commit") @@ -129,9 +129,9 @@ func CommitChangesWithArgs(repoPath string, args []CmdArg, opts CommitChangesOpt opts.Author = opts.Committer } if opts.Author != nil { - cmd.AddArguments(CmdArg(fmt.Sprintf("--author='%s <%s>'", opts.Author.Name, opts.Author.Email))) + cmd.AddOptionFormat("--author='%s <%s>'", opts.Author.Name, opts.Author.Email) } - cmd.AddArguments("-m").AddDynamicArguments(opts.Message) + cmd.AddOptionValues("-m", opts.Message) _, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) // No stderr but exit status 1 means nothing to commit. diff --git a/modules/git/git.go b/modules/git/git.go index f5919d82dcae1..2feb242ac5dd0 100644 --- a/modules/git/git.go +++ b/modules/git/git.go @@ -383,6 +383,6 @@ func configUnsetAll(key, value string) error { } // Fsck verifies the connectivity and validity of the objects in the database -func Fsck(ctx context.Context, repoPath string, timeout time.Duration, args ...CmdArg) error { +func Fsck(ctx context.Context, repoPath string, timeout time.Duration, args TrustedCmdArgs) error { return NewCommand(ctx, "fsck").AddArguments(args...).Run(&RunOpts{Timeout: timeout, Dir: repoPath}) } diff --git a/modules/git/internal/cmdarg.go b/modules/git/internal/cmdarg.go new file mode 100644 index 0000000000000..f8f3c2011ea9c --- /dev/null +++ b/modules/git/internal/cmdarg.go @@ -0,0 +1,9 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package internal + +// CmdArg represents a command argument for git command, and it will be used for the git command directly without any further processing. +// In most cases, you should use the "AddXxx" functions to add arguments, but not use this type directly. +// Casting a risky (user-provided) string to CmdArg would cause security issues if it's injected with a "--xxx" argument. +type CmdArg string diff --git a/modules/git/repo.go b/modules/git/repo.go index 4ba40d20af0d5..e77a3a6ad8e39 100644 --- a/modules/git/repo.go +++ b/modules/git/repo.go @@ -115,7 +115,7 @@ func Clone(ctx context.Context, from, to string, opts CloneRepoOptions) error { } // CloneWithArgs original repository to target path. 
-func CloneWithArgs(ctx context.Context, args []CmdArg, from, to string, opts CloneRepoOptions) (err error) { +func CloneWithArgs(ctx context.Context, args TrustedCmdArgs, from, to string, opts CloneRepoOptions) (err error) { toDir := path.Dir(to) if err = os.MkdirAll(toDir, os.ModePerm); err != nil { return err diff --git a/modules/git/repo_archive.go b/modules/git/repo_archive.go index cff9724f00543..2b45a50f191b6 100644 --- a/modules/git/repo_archive.go +++ b/modules/git/repo_archive.go @@ -57,9 +57,9 @@ func (repo *Repository) CreateArchive(ctx context.Context, format ArchiveType, t cmd := NewCommand(ctx, "archive") if usePrefix { - cmd.AddArguments(CmdArg("--prefix=" + filepath.Base(strings.TrimSuffix(repo.Path, ".git")) + "/")) + cmd.AddOptionFormat("--prefix=%s", filepath.Base(strings.TrimSuffix(repo.Path, ".git"))+"/") } - cmd.AddArguments(CmdArg("--format=" + format.String())) + cmd.AddOptionFormat("--format=%s", format.String()) cmd.AddDynamicArguments(commitID) var stderr strings.Builder diff --git a/modules/git/repo_attribute.go b/modules/git/repo_attribute.go index 404d9e502c042..e7d5fb68068bb 100644 --- a/modules/git/repo_attribute.go +++ b/modules/git/repo_attribute.go @@ -17,7 +17,7 @@ import ( type CheckAttributeOpts struct { CachedOnly bool AllAttributes bool - Attributes []CmdArg + Attributes []string Filenames []string IndexFile string WorkTree string @@ -48,7 +48,7 @@ func (repo *Repository) CheckAttribute(opts CheckAttributeOpts) (map[string]map[ } else { for _, attribute := range opts.Attributes { if attribute != "" { - cmd.AddArguments(attribute) + cmd.AddDynamicArguments(attribute) } } } @@ -95,7 +95,7 @@ func (repo *Repository) CheckAttribute(opts CheckAttributeOpts) (map[string]map[ // CheckAttributeReader provides a reader for check-attribute content that can be long running type CheckAttributeReader struct { // params - Attributes []CmdArg + Attributes []string Repo *Repository IndexFile string WorkTree string @@ -111,19 +111,6 @@ type CheckAttributeReader struct { // Init initializes the CheckAttributeReader func (c *CheckAttributeReader) Init(ctx context.Context) error { - cmdArgs := []CmdArg{"check-attr", "--stdin", "-z"} - - if len(c.IndexFile) > 0 { - cmdArgs = append(cmdArgs, "--cached") - c.env = append(c.env, "GIT_INDEX_FILE="+c.IndexFile) - } - - if len(c.WorkTree) > 0 { - c.env = append(c.env, "GIT_WORK_TREE="+c.WorkTree) - } - - c.env = append(c.env, "GIT_FLUSH=1") - if len(c.Attributes) == 0 { lw := new(nulSeparatedAttributeWriter) lw.attributes = make(chan attributeTriple) @@ -134,11 +121,22 @@ func (c *CheckAttributeReader) Init(ctx context.Context) error { return fmt.Errorf("no provided Attributes to check") } - cmdArgs = append(cmdArgs, c.Attributes...) - cmdArgs = append(cmdArgs, "--") - c.ctx, c.cancel = context.WithCancel(ctx) - c.cmd = NewCommand(c.ctx, cmdArgs...) + c.cmd = NewCommand(c.ctx, "check-attr", "--stdin", "-z") + + if len(c.IndexFile) > 0 { + c.cmd.AddArguments("--cached") + c.env = append(c.env, "GIT_INDEX_FILE="+c.IndexFile) + } + + if len(c.WorkTree) > 0 { + c.env = append(c.env, "GIT_WORK_TREE="+c.WorkTree) + } + + c.env = append(c.env, "GIT_FLUSH=1") + + // The empty "--" comes from #16773 , and it seems unnecessary because nothing else would be added later. 
+ c.cmd.AddDynamicArguments(c.Attributes...).AddArguments("--") var err error @@ -294,7 +292,7 @@ func (repo *Repository) CheckAttributeReader(commitID string) (*CheckAttributeRe } checker := &CheckAttributeReader{ - Attributes: []CmdArg{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language"}, + Attributes: []string{"linguist-vendored", "linguist-generated", "linguist-language", "gitlab-language"}, Repo: repo, IndexFile: indexFilename, WorkTree: worktree, diff --git a/modules/git/repo_blame.go b/modules/git/repo_blame.go index 7f44735f9f907..e25efa2d3bfa5 100644 --- a/modules/git/repo_blame.go +++ b/modules/git/repo_blame.go @@ -3,7 +3,9 @@ package git -import "fmt" +import ( + "fmt" +) // FileBlame return the Blame object of file func (repo *Repository) FileBlame(revision, path, file string) ([]byte, error) { @@ -14,8 +16,8 @@ func (repo *Repository) FileBlame(revision, path, file string) ([]byte, error) { // LineBlame returns the latest commit at the given line func (repo *Repository) LineBlame(revision, path, file string, line uint) (*Commit, error) { res, _, err := NewCommand(repo.Ctx, "blame"). - AddArguments(CmdArg(fmt.Sprintf("-L %d,%d", line, line))). - AddArguments("-p").AddDynamicArguments(revision). + AddOptionFormat("-L %d,%d", line, line). + AddOptionValues("-p", revision). AddDashesAndList(file).RunStdString(&RunOpts{Dir: path}) if err != nil { return nil, err diff --git a/modules/git/repo_branch_gogit.go b/modules/git/repo_branch_gogit.go index ca19d3827e586..f9896a7a09047 100644 --- a/modules/git/repo_branch_gogit.go +++ b/modules/git/repo_branch_gogit.go @@ -50,8 +50,8 @@ func (repo *Repository) IsBranchExist(name string) bool { return reference.Type() != plumbing.InvalidReference } -// GetBranches returns branches from the repository, skipping skip initial branches and -// returning at most limit branches, or all branches if limit is 0. +// GetBranches returns branches from the repository, skipping "skip" initial branches and +// returning at most "limit" branches, or all branches if "limit" is 0. func (repo *Repository) GetBranchNames(skip, limit int) ([]string, int, error) { var branchNames []string diff --git a/modules/git/repo_branch_nogogit.go b/modules/git/repo_branch_nogogit.go index 7559513c9b433..b1e7c8b73e640 100644 --- a/modules/git/repo_branch_nogogit.go +++ b/modules/git/repo_branch_nogogit.go @@ -59,10 +59,10 @@ func (repo *Repository) IsBranchExist(name string) bool { return repo.IsReferenceExist(BranchPrefix + name) } -// GetBranchNames returns branches from the repository, skipping skip initial branches and -// returning at most limit branches, or all branches if limit is 0. +// GetBranchNames returns branches from the repository, skipping "skip" initial branches and +// returning at most "limit" branches, or all branches if "limit" is 0. func (repo *Repository) GetBranchNames(skip, limit int) ([]string, int, error) { - return callShowRef(repo.Ctx, repo.Path, BranchPrefix, []CmdArg{BranchPrefix, "--sort=-committerdate"}, skip, limit) + return callShowRef(repo.Ctx, repo.Path, BranchPrefix, TrustedCmdArgs{BranchPrefix, "--sort=-committerdate"}, skip, limit) } // WalkReferences walks all the references from the repository @@ -73,19 +73,19 @@ func WalkReferences(ctx context.Context, repoPath string, walkfn func(sha1, refn // WalkReferences walks all the references from the repository // refType should be empty, ObjectTag or ObjectBranch. All other values are equivalent to empty. 
func (repo *Repository) WalkReferences(refType ObjectType, skip, limit int, walkfn func(sha1, refname string) error) (int, error) { - var args []CmdArg + var args TrustedCmdArgs switch refType { case ObjectTag: - args = []CmdArg{TagPrefix, "--sort=-taggerdate"} + args = TrustedCmdArgs{TagPrefix, "--sort=-taggerdate"} case ObjectBranch: - args = []CmdArg{BranchPrefix, "--sort=-committerdate"} + args = TrustedCmdArgs{BranchPrefix, "--sort=-committerdate"} } return walkShowRef(repo.Ctx, repo.Path, args, skip, limit, walkfn) } // callShowRef return refs, if limit = 0 it will not limit -func callShowRef(ctx context.Context, repoPath, trimPrefix string, extraArgs []CmdArg, skip, limit int) (branchNames []string, countAll int, err error) { +func callShowRef(ctx context.Context, repoPath, trimPrefix string, extraArgs TrustedCmdArgs, skip, limit int) (branchNames []string, countAll int, err error) { countAll, err = walkShowRef(ctx, repoPath, extraArgs, skip, limit, func(_, branchName string) error { branchName = strings.TrimPrefix(branchName, trimPrefix) branchNames = append(branchNames, branchName) @@ -95,7 +95,7 @@ func callShowRef(ctx context.Context, repoPath, trimPrefix string, extraArgs []C return branchNames, countAll, err } -func walkShowRef(ctx context.Context, repoPath string, extraArgs []CmdArg, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) { +func walkShowRef(ctx context.Context, repoPath string, extraArgs TrustedCmdArgs, skip, limit int, walkfn func(sha1, refname string) error) (countAll int, err error) { stdoutReader, stdoutWriter := io.Pipe() defer func() { _ = stdoutReader.Close() @@ -104,7 +104,7 @@ func walkShowRef(ctx context.Context, repoPath string, extraArgs []CmdArg, skip, go func() { stderrBuilder := &strings.Builder{} - args := []CmdArg{"for-each-ref", "--format=%(objectname) %(refname)"} + args := TrustedCmdArgs{"for-each-ref", "--format=%(objectname) %(refname)"} args = append(args, extraArgs...) err := NewCommand(ctx, args...).Run(&RunOpts{ Dir: repoPath, diff --git a/modules/git/repo_commit.go b/modules/git/repo_commit.go index 8343e34843534..a62e0670fedca 100644 --- a/modules/git/repo_commit.go +++ b/modules/git/repo_commit.go @@ -89,7 +89,7 @@ func (repo *Repository) GetCommitByPath(relpath string) (*Commit, error) { func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) ([]*Commit, error) { stdout, _, err := NewCommand(repo.Ctx, "log"). - AddArguments(CmdArg("--skip="+strconv.Itoa((page-1)*pageSize)), CmdArg("--max-count="+strconv.Itoa(pageSize)), prettyLogFormat). + AddOptionFormat("--skip=%d", (page-1)*pageSize).AddOptionFormat("--max-count=%d", pageSize).AddArguments(prettyLogFormat). AddDynamicArguments(id.String()). 
RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { @@ -99,32 +99,36 @@ func (repo *Repository) commitsByRange(id SHA1, page, pageSize int) ([]*Commit, } func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Commit, error) { - // create new git log command with limit of 100 commis - cmd := NewCommand(repo.Ctx, "log", "-100", prettyLogFormat).AddDynamicArguments(id.String()) - // ignore case - args := []CmdArg{"-i"} + // add common arguments to git command + addCommonSearchArgs := func(c *Command) { + // ignore case + c.AddArguments("-i") + + // add authors if present in search query + if len(opts.Authors) > 0 { + for _, v := range opts.Authors { + c.AddOptionFormat("--author=%s", v) + } + } - // add authors if present in search query - if len(opts.Authors) > 0 { - for _, v := range opts.Authors { - args = append(args, CmdArg("--author="+v)) + // add committers if present in search query + if len(opts.Committers) > 0 { + for _, v := range opts.Committers { + c.AddOptionFormat("--committer=%s", v) + } } - } - // add committers if present in search query - if len(opts.Committers) > 0 { - for _, v := range opts.Committers { - args = append(args, CmdArg("--committer="+v)) + // add time constraints if present in search query + if len(opts.After) > 0 { + c.AddOptionFormat("--after=%s", opts.After) + } + if len(opts.Before) > 0 { + c.AddOptionFormat("--before=%s", opts.Before) } } - // add time constraints if present in search query - if len(opts.After) > 0 { - args = append(args, CmdArg("--after="+opts.After)) - } - if len(opts.Before) > 0 { - args = append(args, CmdArg("--before="+opts.Before)) - } + // create new git log command with limit of 100 commits + cmd := NewCommand(repo.Ctx, "log", "-100", prettyLogFormat).AddDynamicArguments(id.String()) // pretend that all refs along with HEAD were listed on command line as // https://git-scm.com/docs/git-log#Documentation/git-log.txt---all @@ -137,12 +141,12 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Co // note this is done only for command created above if len(opts.Keywords) > 0 { for _, v := range opts.Keywords { - cmd.AddArguments(CmdArg("--grep=" + v)) + cmd.AddOptionFormat("--grep=%s", v) } } // search for commits matching given constraints and keywords in commit msg - cmd.AddArguments(args...) + addCommonSearchArgs(cmd) stdout, _, err := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err @@ -160,7 +164,7 @@ func (repo *Repository) searchCommits(id SHA1, opts SearchCommitsOptions) ([]*Co // create new git log command with 1 commit limit hashCmd := NewCommand(repo.Ctx, "log", "-1", prettyLogFormat) // add previous arguments except for --grep and --all - hashCmd.AddArguments(args...) + addCommonSearchArgs(hashCmd) // add keyword as hashCmd.AddDynamicArguments(v) @@ -213,8 +217,8 @@ func (repo *Repository) CommitsByFileAndRange(revision, file string, page int) ( go func() { stderr := strings.Builder{} gitCmd := NewCommand(repo.Ctx, "rev-list"). - AddArguments(CmdArg("--max-count=" + strconv.Itoa(setting.Git.CommitsRangeSize*page))). - AddArguments(CmdArg("--skip=" + strconv.Itoa(skip))) + AddOptionFormat("--max-count=%d", setting.Git.CommitsRangeSize*page). 
+ AddOptionFormat("--skip=%d", skip) gitCmd.AddDynamicArguments(revision) gitCmd.AddDashesAndList(file) err := gitCmd.Run(&RunOpts{ @@ -295,21 +299,21 @@ func (repo *Repository) CommitsBetweenLimit(last, before *Commit, limit, skip in var stdout []byte var err error if before == nil { - stdout, _, err = NewCommand(repo.Ctx, "rev-list", - "--max-count", CmdArg(strconv.Itoa(limit)), - "--skip", CmdArg(strconv.Itoa(skip))). + stdout, _, err = NewCommand(repo.Ctx, "rev-list"). + AddOptionValues("--max-count", strconv.Itoa(limit)). + AddOptionValues("--skip", strconv.Itoa(skip)). AddDynamicArguments(last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } else { - stdout, _, err = NewCommand(repo.Ctx, "rev-list", - "--max-count", CmdArg(strconv.Itoa(limit)), - "--skip", CmdArg(strconv.Itoa(skip))). + stdout, _, err = NewCommand(repo.Ctx, "rev-list"). + AddOptionValues("--max-count", strconv.Itoa(limit)). + AddOptionValues("--skip", strconv.Itoa(skip)). AddDynamicArguments(before.ID.String() + ".." + last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) if err != nil && strings.Contains(err.Error(), "no merge base") { // future versions of git >= 2.28 are likely to return an error if before and last have become unrelated. // previously it would return the results of git rev-list --max-count n before last so let's try that... - stdout, _, err = NewCommand(repo.Ctx, "rev-list", - "--max-count", CmdArg(strconv.Itoa(limit)), - "--skip", CmdArg(strconv.Itoa(skip))). + stdout, _, err = NewCommand(repo.Ctx, "rev-list"). + AddOptionValues("--max-count", strconv.Itoa(limit)). + AddOptionValues("--skip", strconv.Itoa(skip)). AddDynamicArguments(before.ID.String(), last.ID.String()).RunStdBytes(&RunOpts{Dir: repo.Path}) } } @@ -349,12 +353,11 @@ func (repo *Repository) CommitsCountBetween(start, end string) (int64, error) { // commitsBefore the limit is depth, not total number of returned commits. func (repo *Repository) commitsBefore(id SHA1, limit int) ([]*Commit, error) { - cmd := NewCommand(repo.Ctx, "log") + cmd := NewCommand(repo.Ctx, "log", prettyLogFormat) if limit > 0 { - cmd.AddArguments(CmdArg("-"+strconv.Itoa(limit)), prettyLogFormat).AddDynamicArguments(id.String()) - } else { - cmd.AddArguments(prettyLogFormat).AddDynamicArguments(id.String()) + cmd.AddOptionFormat("-%d", limit) } + cmd.AddDynamicArguments(id.String()) stdout, _, runErr := cmd.RunStdBytes(&RunOpts{Dir: repo.Path}) if runErr != nil { @@ -393,10 +396,9 @@ func (repo *Repository) getCommitsBeforeLimit(id SHA1, num int) ([]*Commit, erro func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error) { if CheckGitVersionAtLeast("2.7.0") == nil { - stdout, _, err := NewCommand(repo.Ctx, "for-each-ref", - CmdArg("--count="+strconv.Itoa(limit)), - "--format=%(refname:strip=2)", "--contains"). - AddDynamicArguments(commit.ID.String(), BranchPrefix). + stdout, _, err := NewCommand(repo.Ctx, "for-each-ref", "--format=%(refname:strip=2)"). + AddOptionFormat("--count=%d", limit). + AddOptionValues("--contains", commit.ID.String(), BranchPrefix). 
RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err @@ -406,7 +408,7 @@ func (repo *Repository) getBranches(commit *Commit, limit int) ([]string, error) return branches, nil } - stdout, _, err := NewCommand(repo.Ctx, "branch", "--contains").AddDynamicArguments(commit.ID.String()).RunStdString(&RunOpts{Dir: repo.Path}) + stdout, _, err := NewCommand(repo.Ctx, "branch").AddOptionValues("--contains", commit.ID.String()).RunStdString(&RunOpts{Dir: repo.Path}) if err != nil { return nil, err } diff --git a/modules/git/repo_compare.go b/modules/git/repo_compare.go index b1b55c88a4446..9a4d66f2f521b 100644 --- a/modules/git/repo_compare.go +++ b/modules/git/repo_compare.go @@ -172,25 +172,21 @@ func (repo *Repository) GetDiffNumChangedFiles(base, head string, directComparis // GetDiffShortStat counts number of changed files, number of additions and deletions func (repo *Repository) GetDiffShortStat(base, head string) (numFiles, totalAdditions, totalDeletions int, err error) { - numFiles, totalAdditions, totalDeletions, err = GetDiffShortStat(repo.Ctx, repo.Path, CmdArgCheck(base+"..."+head)) + numFiles, totalAdditions, totalDeletions, err = GetDiffShortStat(repo.Ctx, repo.Path, nil, base+"..."+head) if err != nil && strings.Contains(err.Error(), "no merge base") { - return GetDiffShortStat(repo.Ctx, repo.Path, CmdArgCheck(base), CmdArgCheck(head)) + return GetDiffShortStat(repo.Ctx, repo.Path, nil, base, head) } return numFiles, totalAdditions, totalDeletions, err } // GetDiffShortStat counts number of changed files, number of additions and deletions -func GetDiffShortStat(ctx context.Context, repoPath string, args ...CmdArg) (numFiles, totalAdditions, totalDeletions int, err error) { +func GetDiffShortStat(ctx context.Context, repoPath string, trustedArgs TrustedCmdArgs, dynamicArgs ...string) (numFiles, totalAdditions, totalDeletions int, err error) { // Now if we call: // $ git diff --shortstat 1ebb35b98889ff77299f24d82da426b434b0cca0...788b8b1440462d477f45b0088875 // we get: // " 9902 files changed, 2034198 insertions(+), 298800 deletions(-)\n" - args = append([]CmdArg{ - "diff", - "--shortstat", - }, args...) - - stdout, _, err := NewCommand(ctx, args...).RunStdString(&RunOpts{Dir: repoPath}) + cmd := NewCommand(ctx, "diff", "--shortstat").AddArguments(trustedArgs...).AddDynamicArguments(dynamicArgs...) 
+ stdout, _, err := cmd.RunStdString(&RunOpts{Dir: repoPath}) if err != nil { return 0, 0, 0, err } diff --git a/modules/git/repo_stats.go b/modules/git/repo_stats.go index d6e91f25a9a60..41f94e24f99ca 100644 --- a/modules/git/repo_stats.go +++ b/modules/git/repo_stats.go @@ -40,7 +40,7 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) since := fromTime.Format(time.RFC3339) - stdout, _, runErr := NewCommand(repo.Ctx, "rev-list", "--count", "--no-merges", "--branches=*", "--date=iso", CmdArg(fmt.Sprintf("--since='%s'", since))).RunStdString(&RunOpts{Dir: repo.Path}) + stdout, _, runErr := NewCommand(repo.Ctx, "rev-list", "--count", "--no-merges", "--branches=*", "--date=iso").AddOptionFormat("--since='%s'", since).RunStdString(&RunOpts{Dir: repo.Path}) if runErr != nil { return nil, runErr } @@ -60,7 +60,7 @@ func (repo *Repository) GetCodeActivityStats(fromTime time.Time, branch string) _ = stdoutWriter.Close() }() - gitCmd := NewCommand(repo.Ctx, "log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso", CmdArg(fmt.Sprintf("--since='%s'", since))) + gitCmd := NewCommand(repo.Ctx, "log", "--numstat", "--no-merges", "--pretty=format:---%n%h%n%aN%n%aE%n", "--date=iso").AddOptionFormat("--since='%s'", since) if len(branch) == 0 { gitCmd.AddArguments("--branches=*") } else { diff --git a/modules/git/repo_tag.go b/modules/git/repo_tag.go index 8aa06545d4cb1..ae877f0211002 100644 --- a/modules/git/repo_tag.go +++ b/modules/git/repo_tag.go @@ -121,7 +121,9 @@ func (repo *Repository) GetTagInfos(page, pageSize int) ([]*Tag, int, error) { rc := &RunOpts{Dir: repo.Path, Stdout: stdoutWriter, Stderr: &stderr} go func() { - err := NewCommand(repo.Ctx, "for-each-ref", CmdArg("--format="+forEachRefFmt.Flag()), "--sort", "-*creatordate", "refs/tags").Run(rc) + err := NewCommand(repo.Ctx, "for-each-ref"). + AddOptionFormat("--format=%s", forEachRefFmt.Flag()). + AddArguments("--sort", "-*creatordate", "refs/tags").Run(rc) if err != nil { _ = stdoutWriter.CloseWithError(ConcatenateError(err, stderr.String())) } else { diff --git a/modules/git/repo_tag_nogogit.go b/modules/git/repo_tag_nogogit.go index d3331cf9b73e1..9080ffcfd7820 100644 --- a/modules/git/repo_tag_nogogit.go +++ b/modules/git/repo_tag_nogogit.go @@ -25,7 +25,7 @@ func (repo *Repository) IsTagExist(name string) bool { // GetTags returns all tags of the repository. // returning at most limit tags, or all if limit is 0. 
func (repo *Repository) GetTags(skip, limit int) (tags []string, err error) { - tags, _, err = callShowRef(repo.Ctx, repo.Path, TagPrefix, []CmdArg{TagPrefix, "--sort=-taggerdate"}, skip, limit) + tags, _, err = callShowRef(repo.Ctx, repo.Path, TagPrefix, TrustedCmdArgs{TagPrefix, "--sort=-taggerdate"}, skip, limit) return tags, err } diff --git a/modules/git/repo_tree.go b/modules/git/repo_tree.go index 5fea5c0aea4bd..63c33379bf5dc 100644 --- a/modules/git/repo_tree.go +++ b/modules/git/repo_tree.go @@ -6,7 +6,6 @@ package git import ( "bytes" - "fmt" "os" "strings" "time" @@ -45,7 +44,7 @@ func (repo *Repository) CommitTree(author, committer *Signature, tree *Tree, opt _, _ = messageBytes.WriteString("\n") if opts.KeyID != "" || opts.AlwaysSign { - cmd.AddArguments(CmdArg(fmt.Sprintf("-S%s", opts.KeyID))) + cmd.AddOptionFormat("-S%s", opts.KeyID) } if opts.NoGPGSign { diff --git a/modules/git/tree_nogogit.go b/modules/git/tree_nogogit.go index 185317e7a7096..ef598d7e91327 100644 --- a/modules/git/tree_nogogit.go +++ b/modules/git/tree_nogogit.go @@ -100,14 +100,15 @@ func (t *Tree) ListEntries() (Entries, error) { // listEntriesRecursive returns all entries of current tree recursively including all subtrees // extraArgs could be "-l" to get the size, which is slower -func (t *Tree) listEntriesRecursive(extraArgs ...CmdArg) (Entries, error) { +func (t *Tree) listEntriesRecursive(extraArgs TrustedCmdArgs) (Entries, error) { if t.entriesRecursiveParsed { return t.entriesRecursive, nil } - args := append([]CmdArg{"ls-tree", "-t", "-r"}, extraArgs...) - args = append(args, CmdArg(t.ID.String())) - stdout, _, runErr := NewCommand(t.repo.Ctx, args...).RunStdBytes(&RunOpts{Dir: t.repo.Path}) + stdout, _, runErr := NewCommand(t.repo.Ctx, "ls-tree", "-t", "-r"). + AddArguments(extraArgs...). + AddDynamicArguments(t.ID.String()). + RunStdBytes(&RunOpts{Dir: t.repo.Path}) if runErr != nil { return nil, runErr } @@ -123,10 +124,10 @@ func (t *Tree) listEntriesRecursive(extraArgs ...CmdArg) (Entries, error) { // ListEntriesRecursiveFast returns all entries of current tree recursively including all subtrees, no size func (t *Tree) ListEntriesRecursiveFast() (Entries, error) { - return t.listEntriesRecursive() + return t.listEntriesRecursive(nil) } // ListEntriesRecursiveWithSize returns all entries of current tree recursively including all subtrees, with size func (t *Tree) ListEntriesRecursiveWithSize() (Entries, error) { - return t.listEntriesRecursive("--long") + return t.listEntriesRecursive(TrustedCmdArgs{"--long"}) } diff --git a/modules/gitgraph/graph.go b/modules/gitgraph/graph.go index baedfe5980632..331ad6b218de9 100644 --- a/modules/gitgraph/graph.go +++ b/modules/gitgraph/graph.go @@ -7,7 +7,6 @@ import ( "bufio" "bytes" "context" - "fmt" "os" "strings" @@ -33,12 +32,9 @@ func GetCommitGraph(r *git.Repository, page, maxAllowedColors int, hidePRRefs bo graphCmd.AddArguments("--all") } - graphCmd.AddArguments( - "-C", - "-M", - git.CmdArg(fmt.Sprintf("-n %d", setting.UI.GraphMaxCommitNum*page)), - "--date=iso", - git.CmdArg(fmt.Sprintf("--pretty=format:%s", format))) + graphCmd.AddArguments("-C", "-M", "--date=iso"). + AddOptionFormat("-n %d", setting.UI.GraphMaxCommitNum*page). + AddOptionFormat("--pretty=format:%s", format) if len(branches) > 0 { graphCmd.AddDynamicArguments(branches...) 
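The call-site hunks above all apply the same conversion: options previously assembled with CmdArg(fmt.Sprintf(...)) or string concatenation become AddOptionFormat / AddOptionValues calls, and []CmdArg parameters and literals become TrustedCmdArgs. A condensed before/after sketch of that pattern follows; the listTags function, its parameters and the exact for-each-ref flags are illustrative assumptions, not code from this patch.

    package gitdemo // hypothetical call site condensing the conversion pattern, for illustration only

    import (
        "context"

        "code.gitea.io/gitea/modules/git"
    )

    // listTags shows the shape of the conversions above. A value interpolated by
    // AddOptionFormat ends up inside a single "--opt=value" argument, so it can
    // never be parsed as a separate git option, and trusted extra flags keep
    // flowing through the exported TrustedCmdArgs type instead of []CmdArg.
    func listTags(ctx context.Context, repoPath, format string, extra git.TrustedCmdArgs) (string, error) {
        // pre-patch style, for comparison:
        //   NewCommand(ctx, "for-each-ref", CmdArg("--format="+format), "refs/tags")
        cmd := git.NewCommand(ctx, "for-each-ref").
            AddOptionFormat("--format=%s", format). // the %-verb count is checked against the argument count
            AddArguments(extra...).                 // e.g. git.TrustedCmdArgs{"--sort=-taggerdate"}
            AddArguments("refs/tags")

        stdout, _, err := cmd.RunStdString(&git.RunOpts{Dir: repoPath})
        if err != nil {
            return "", err
        }
        return stdout, nil
    }

Because AddOptionFormat checks the number of %-verbs against the number of arguments, a mismatched call is recorded as a broken argument instead of silently producing a malformed git command.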
diff --git a/modules/httpcache/httpcache.go b/modules/httpcache/httpcache.go index d7d9ce0b7ebf9..f0caa30eb82b3 100644 --- a/modules/httpcache/httpcache.go +++ b/modules/httpcache/httpcache.go @@ -19,6 +19,8 @@ import ( func AddCacheControlToHeader(h http.Header, maxAge time.Duration, additionalDirectives ...string) { directives := make([]string, 0, 2+len(additionalDirectives)) + // "max-age=0 + must-revalidate" (aka "no-cache") is preferred instead of "no-store" + // because browsers may restore some input fields after navigate-back / reload a page. if setting.IsProd { if maxAge == 0 { directives = append(directives, "max-age=0", "private", "must-revalidate") diff --git a/modules/log/colors.go b/modules/log/colors.go index 02781afe84ebd..85e205cb6764d 100644 --- a/modules/log/colors.go +++ b/modules/log/colors.go @@ -383,6 +383,13 @@ func (cv *ColoredValue) Format(s fmt.State, c rune) { s.Write(*cv.resetBytes) } +// ColorFormatAsString returns the result of the ColorFormat without the color +func ColorFormatAsString(colorVal ColorFormatted) string { + s := new(strings.Builder) + _, _ = ColorFprintf(&protectedANSIWriter{w: s, mode: removeColor}, "%-v", colorVal) + return s.String() +} + // SetColorBytes will allow a user to set the colorBytes of a colored value func (cv *ColoredValue) SetColorBytes(colorBytes []byte) { cv.colorBytes = &colorBytes diff --git a/modules/log/log.go b/modules/log/log.go index 73f908dfab55a..9057cda37e6b1 100644 --- a/modules/log/log.go +++ b/modules/log/log.go @@ -9,6 +9,8 @@ import ( "runtime" "strings" "sync" + + "code.gitea.io/gitea/modules/process" ) type loggerMap struct { @@ -285,6 +287,15 @@ func (l *LoggerAsWriter) Log(msg string) { } func init() { + process.Trace = func(start bool, pid process.IDType, description string, parentPID process.IDType, typ string) { + if start && parentPID != "" { + Log(1, TRACE, "Start %s: %s (from %s) (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(parentPID, FgYellow), NewColoredValue(typ, Reset)) + } else if start { + Log(1, TRACE, "Start %s: %s (%s)", NewColoredValue(pid, FgHiYellow), description, NewColoredValue(typ, Reset)) + } else { + Log(1, TRACE, "Done %s: %s", NewColoredValue(pid, FgHiYellow), NewColoredValue(description, Reset)) + } + } _, filename, _, _ := runtime.Caller(0) prefix = strings.TrimSuffix(filename, "modules/log/log.go") if prefix == filename { diff --git a/modules/packages/cargo/parser.go b/modules/packages/cargo/parser.go new file mode 100644 index 0000000000000..36cd44df847aa --- /dev/null +++ b/modules/packages/cargo/parser.go @@ -0,0 +1,169 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package cargo + +import ( + "encoding/binary" + "errors" + "io" + "regexp" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/validation" + + "github.com/hashicorp/go-version" +) + +const PropertyYanked = "cargo.yanked" + +var ( + ErrInvalidName = errors.New("package name is invalid") + ErrInvalidVersion = errors.New("package version is invalid") +) + +// Package represents a Cargo package +type Package struct { + Name string + Version string + Metadata *Metadata + Content io.Reader + ContentSize int64 +} + +// Metadata represents the metadata of a Cargo package +type Metadata struct { + Dependencies []*Dependency `json:"dependencies,omitempty"` + Features map[string][]string `json:"features,omitempty"` + Authors []string `json:"authors,omitempty"` + Description string `json:"description,omitempty"` + DocumentationURL string `json:"documentation_url,omitempty"` + ProjectURL string `json:"project_url,omitempty"` + Readme string `json:"readme,omitempty"` + Keywords []string `json:"keywords,omitempty"` + Categories []string `json:"categories,omitempty"` + License string `json:"license,omitempty"` + RepositoryURL string `json:"repository_url,omitempty"` + Links string `json:"links,omitempty"` +} + +type Dependency struct { + Name string `json:"name"` + Req string `json:"req"` + Features []string `json:"features"` + Optional bool `json:"optional"` + DefaultFeatures bool `json:"default_features"` + Target *string `json:"target"` + Kind string `json:"kind"` + Registry *string `json:"registry"` + Package *string `json:"package"` +} + +var nameMatch = regexp.MustCompile(`\A[a-zA-Z][a-zA-Z0-9-_]{0,63}\z`) + +// ParsePackage reads the metadata and content of a package +func ParsePackage(r io.Reader) (*Package, error) { + var size uint32 + if err := binary.Read(r, binary.LittleEndian, &size); err != nil { + return nil, err + } + + p, err := parsePackage(io.LimitReader(r, int64(size))) + if err != nil { + return nil, err + } + + if err := binary.Read(r, binary.LittleEndian, &size); err != nil { + return nil, err + } + + p.Content = io.LimitReader(r, int64(size)) + p.ContentSize = int64(size) + + return p, nil +} + +func parsePackage(r io.Reader) (*Package, error) { + var meta struct { + Name string `json:"name"` + Vers string `json:"vers"` + Deps []struct { + Name string `json:"name"` + VersionReq string `json:"version_req"` + Features []string `json:"features"` + Optional bool `json:"optional"` + DefaultFeatures bool `json:"default_features"` + Target *string `json:"target"` + Kind string `json:"kind"` + Registry *string `json:"registry"` + ExplicitNameInToml string `json:"explicit_name_in_toml"` + } `json:"deps"` + Features map[string][]string `json:"features"` + Authors []string `json:"authors"` + Description string `json:"description"` + Documentation string `json:"documentation"` + Homepage string `json:"homepage"` + Readme string `json:"readme"` + ReadmeFile string `json:"readme_file"` + Keywords []string `json:"keywords"` + Categories []string `json:"categories"` + License string `json:"license"` + LicenseFile string `json:"license_file"` + Repository string `json:"repository"` + Links string `json:"links"` + } + if err := json.NewDecoder(r).Decode(&meta); err != nil { + return nil, err + } + + if !nameMatch.MatchString(meta.Name) { + return nil, ErrInvalidName + } + + if _, err := version.NewSemver(meta.Vers); err != nil { + return nil, ErrInvalidVersion + } + + if !validation.IsValidURL(meta.Homepage) { + meta.Homepage = "" + } + if 
!validation.IsValidURL(meta.Documentation) { + meta.Documentation = "" + } + if !validation.IsValidURL(meta.Repository) { + meta.Repository = "" + } + + dependencies := make([]*Dependency, 0, len(meta.Deps)) + for _, dep := range meta.Deps { + dependencies = append(dependencies, &Dependency{ + Name: dep.Name, + Req: dep.VersionReq, + Features: dep.Features, + Optional: dep.Optional, + DefaultFeatures: dep.DefaultFeatures, + Target: dep.Target, + Kind: dep.Kind, + Registry: dep.Registry, + }) + } + + return &Package{ + Name: meta.Name, + Version: meta.Vers, + Metadata: &Metadata{ + Dependencies: dependencies, + Features: meta.Features, + Authors: meta.Authors, + Description: meta.Description, + DocumentationURL: meta.Documentation, + ProjectURL: meta.Homepage, + Readme: meta.Readme, + Keywords: meta.Keywords, + Categories: meta.Categories, + License: meta.License, + RepositoryURL: meta.Repository, + Links: meta.Links, + }, + }, nil +} diff --git a/modules/packages/cargo/parser_test.go b/modules/packages/cargo/parser_test.go new file mode 100644 index 0000000000000..2230a5b4999c9 --- /dev/null +++ b/modules/packages/cargo/parser_test.go @@ -0,0 +1,86 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package cargo + +import ( + "bytes" + "encoding/binary" + "io" + "strings" + "testing" + + "github.com/stretchr/testify/assert" +) + +const ( + description = "Package Description" + author = "KN4CK3R" + homepage = "https://gitea.io/" + license = "MIT" +) + +func TestParsePackage(t *testing.T) { + createPackage := func(name, version string) io.Reader { + metadata := `{ + "name":"` + name + `", + "vers":"` + version + `", + "description":"` + description + `", + "authors": ["` + author + `"], + "deps":[ + { + "name":"dep", + "version_req":"1.0" + } + ], + "homepage":"` + homepage + `", + "license":"` + license + `" +}` + + var buf bytes.Buffer + binary.Write(&buf, binary.LittleEndian, uint32(len(metadata))) + buf.WriteString(metadata) + binary.Write(&buf, binary.LittleEndian, uint32(4)) + buf.WriteString("test") + return &buf + } + + t.Run("InvalidName", func(t *testing.T) { + for _, name := range []string{"", "0test", "-test", "_test", strings.Repeat("a", 65)} { + data := createPackage(name, "1.0.0") + + cp, err := ParsePackage(data) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrInvalidName) + } + }) + + t.Run("InvalidVersion", func(t *testing.T) { + for _, version := range []string{"", "1.", "-1.0", "1.0.0/1"} { + data := createPackage("test", version) + + cp, err := ParsePackage(data) + assert.Nil(t, cp) + assert.ErrorIs(t, err, ErrInvalidVersion) + } + }) + + t.Run("Valid", func(t *testing.T) { + data := createPackage("test", "1.0.0") + + cp, err := ParsePackage(data) + assert.NotNil(t, cp) + assert.NoError(t, err) + + assert.Equal(t, "test", cp.Name) + assert.Equal(t, "1.0.0", cp.Version) + assert.Equal(t, description, cp.Metadata.Description) + assert.Equal(t, []string{author}, cp.Metadata.Authors) + assert.Len(t, cp.Metadata.Dependencies, 1) + assert.Equal(t, "dep", cp.Metadata.Dependencies[0].Name) + assert.Equal(t, homepage, cp.Metadata.ProjectURL) + assert.Equal(t, license, cp.Metadata.License) + content, _ := io.ReadAll(cp.Content) + assert.Equal(t, "test", string(content)) + }) +} diff --git a/modules/process/manager.go b/modules/process/manager.go index 1272510067ffa..fdfca3db7a159 100644 --- a/modules/process/manager.go +++ b/modules/process/manager.go @@ -6,6 +6,7 @@ package process import ( "context" + "log" "runtime/pprof" "strconv" 
"sync" @@ -43,6 +44,18 @@ type IDType string // - it is simply an alias for context.CancelFunc and is only for documentary purposes type FinishedFunc = context.CancelFunc +var Trace = defaultTrace // this global can be overridden by particular logging packages - thus avoiding import cycles + +func defaultTrace(start bool, pid IDType, description string, parentPID IDType, typ string) { + if start && parentPID != "" { + log.Printf("start process %s: %s (from %s) (%s)", pid, description, parentPID, typ) + } else if start { + log.Printf("start process %s: %s (%s)", pid, description, typ) + } else { + log.Printf("end process %s: %s", pid, description) + } +} + // Manager manages all processes and counts PIDs. type Manager struct { mutex sync.Mutex @@ -154,6 +167,7 @@ func (pm *Manager) Add(ctx context.Context, description string, cancel context.C pm.processMap[pid] = process pm.mutex.Unlock() + Trace(true, pid, description, parentPID, processType) pprofCtx := pprof.WithLabels(ctx, pprof.Labels(DescriptionPProfLabel, description, PPIDPProfLabel, string(parentPID), PIDPProfLabel, string(pid), ProcessTypePProfLabel, processType)) if currentlyRunning { @@ -185,18 +199,12 @@ func (pm *Manager) nextPID() (start time.Time, pid IDType) { return start, pid } -// Remove a process from the ProcessManager. -func (pm *Manager) Remove(pid IDType) { - pm.mutex.Lock() - delete(pm.processMap, pid) - pm.mutex.Unlock() -} - func (pm *Manager) remove(process *process) { pm.mutex.Lock() defer pm.mutex.Unlock() if p := pm.processMap[process.PID]; p == process { delete(pm.processMap, process.PID) + Trace(false, process.PID, process.Description, process.ParentPID, process.Type) } } diff --git a/modules/process/manager_test.go b/modules/process/manager_test.go index 527072713ffa4..2e2e35d24a0bf 100644 --- a/modules/process/manager_test.go +++ b/modules/process/manager_test.go @@ -82,7 +82,7 @@ func TestManager_Remove(t *testing.T) { assert.NotEqual(t, GetContext(p1Ctx).GetPID(), GetContext(p2Ctx).GetPID(), "expected to get different pids got %s == %s", GetContext(p2Ctx).GetPID(), GetContext(p1Ctx).GetPID()) - pm.Remove(GetPID(p2Ctx)) + finished() _, exists := pm.processMap[GetPID(p2Ctx)] assert.False(t, exists, "PID %d is in the list but shouldn't", GetPID(p2Ctx)) diff --git a/modules/repository/create.go b/modules/repository/create.go index 454a192ddbb52..b9a72ad573743 100644 --- a/modules/repository/create.go +++ b/modules/repository/create.go @@ -30,7 +30,7 @@ import ( ) // CreateRepositoryByExample creates a repository for the user/organization. 
-func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt bool) (err error) { +func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, repo *repo_model.Repository, overwriteOrAdopt, isFork bool) (err error) { if err = repo_model.IsUsableRepoName(repo.Name); err != nil { return err } @@ -67,8 +67,12 @@ func CreateRepositoryByExample(ctx context.Context, doer, u *user_model.User, re } // insert units for repo - units := make([]repo_model.RepoUnit, 0, len(unit.DefaultRepoUnits)) - for _, tp := range unit.DefaultRepoUnits { + defaultUnits := unit.DefaultRepoUnits + if isFork { + defaultUnits = unit.DefaultForkRepoUnits + } + units := make([]repo_model.RepoUnit, 0, len(defaultUnits)) + for _, tp := range defaultUnits { if tp == unit.TypeIssues { units = append(units, repo_model.RepoUnit{ RepoID: repo.ID, @@ -207,12 +211,13 @@ func CreateRepository(doer, u *user_model.User, opts CreateRepoOptions) (*repo_m IsEmpty: !opts.AutoInit, TrustModel: opts.TrustModel, IsMirror: opts.IsMirror, + DefaultBranch: opts.DefaultBranch, } var rollbackRepo *repo_model.Repository if err := db.WithTx(db.DefaultContext, func(ctx context.Context) error { - if err := CreateRepositoryByExample(ctx, doer, u, repo, false); err != nil { + if err := CreateRepositoryByExample(ctx, doer, u, repo, false, false); err != nil { return err } diff --git a/modules/repository/generate.go b/modules/repository/generate.go index b6a1d7b43ef09..31d5ebbb11f40 100644 --- a/modules/repository/generate.go +++ b/modules/repository/generate.go @@ -319,7 +319,7 @@ func GenerateRepository(ctx context.Context, doer, owner *user_model.User, templ TrustModel: templateRepo.TrustModel, } - if err = CreateRepositoryByExample(ctx, doer, owner, generateRepo, false); err != nil { + if err = CreateRepositoryByExample(ctx, doer, owner, generateRepo, false, false); err != nil { return nil, err } diff --git a/modules/repository/init.go b/modules/repository/init.go index 2b0d0be7bc99c..5705fe5b998e4 100644 --- a/modules/repository/init.go +++ b/modules/repository/init.go @@ -316,14 +316,13 @@ func initRepoCommit(ctx context.Context, tmpPath string, repo *repo_model.Reposi return fmt.Errorf("git add --all: %w", err) } - cmd := git.NewCommand(ctx, - "commit", git.CmdArg(fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email)), - "-m", "Initial commit", - ) + cmd := git.NewCommand(ctx, "commit"). + AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). 
+ AddOptionValues("-m", "Initial commit") sign, keyID, signer, _ := asymkey_service.SignInitialCommit(ctx, tmpPath, u) if sign { - cmd.AddArguments(git.CmdArg("-S" + keyID)) + cmd.AddOptionFormat("-S%s", keyID) if repo.GetTrustModel() == repo_model.CommitterTrustModel || repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { // need to set the committer to the KeyID owner diff --git a/modules/setting/packages.go b/modules/setting/packages.go index d0cd80aa0358d..190c17dd8fb24 100644 --- a/modules/setting/packages.go +++ b/modules/setting/packages.go @@ -25,6 +25,7 @@ var ( LimitTotalOwnerCount int64 LimitTotalOwnerSize int64 + LimitSizeCargo int64 LimitSizeComposer int64 LimitSizeConan int64 LimitSizeConda int64 @@ -65,6 +66,7 @@ func newPackages() { } Packages.LimitTotalOwnerSize = mustBytes(sec, "LIMIT_TOTAL_OWNER_SIZE") + Packages.LimitSizeCargo = mustBytes(sec, "LIMIT_SIZE_CARGO") Packages.LimitSizeComposer = mustBytes(sec, "LIMIT_SIZE_COMPOSER") Packages.LimitSizeConan = mustBytes(sec, "LIMIT_SIZE_CONAN") Packages.LimitSizeConda = mustBytes(sec, "LIMIT_SIZE_CONDA") diff --git a/modules/setting/repository.go b/modules/setting/repository.go index d78b63a1f3f78..f53de17a4dd98 100644 --- a/modules/setting/repository.go +++ b/modules/setting/repository.go @@ -41,6 +41,7 @@ var ( EnablePushCreateOrg bool DisabledRepoUnits []string DefaultRepoUnits []string + DefaultForkRepoUnits []string PrefixArchiveFiles bool DisableMigrations bool DisableStars bool `ini:"DISABLE_STARS"` @@ -157,6 +158,7 @@ var ( EnablePushCreateOrg: false, DisabledRepoUnits: []string{}, DefaultRepoUnits: []string{}, + DefaultForkRepoUnits: []string{}, PrefixArchiveFiles: true, DisableMigrations: false, DisableStars: false, diff --git a/modules/setting/service.go b/modules/setting/service.go index 7b4bfc5c7b6b2..1d33ac6bce88f 100644 --- a/modules/setting/service.go +++ b/modules/setting/service.go @@ -46,6 +46,8 @@ var Service = struct { RecaptchaSecret string RecaptchaSitekey string RecaptchaURL string + CfTurnstileSecret string + CfTurnstileSitekey string HcaptchaSecret string HcaptchaSitekey string McaptchaSecret string @@ -137,6 +139,8 @@ func newService() { Service.RecaptchaSecret = sec.Key("RECAPTCHA_SECRET").MustString("") Service.RecaptchaSitekey = sec.Key("RECAPTCHA_SITEKEY").MustString("") Service.RecaptchaURL = sec.Key("RECAPTCHA_URL").MustString("https://www.google.com/recaptcha/") + Service.CfTurnstileSecret = sec.Key("CF_TURNSTILE_SECRET").MustString("") + Service.CfTurnstileSitekey = sec.Key("CF_TURNSTILE_SITEKEY").MustString("") Service.HcaptchaSecret = sec.Key("HCAPTCHA_SECRET").MustString("") Service.HcaptchaSitekey = sec.Key("HCAPTCHA_SITEKEY").MustString("") Service.McaptchaURL = sec.Key("MCAPTCHA_URL").MustString("https://demo.mcaptcha.org/") diff --git a/modules/setting/setting.go b/modules/setting/setting.go index f591727ca3421..a68a46f7add10 100644 --- a/modules/setting/setting.go +++ b/modules/setting/setting.go @@ -6,6 +6,7 @@ package setting import ( "encoding/base64" + "errors" "fmt" "math" "net" @@ -60,6 +61,7 @@ const ( ReCaptcha = "recaptcha" HCaptcha = "hcaptcha" MCaptcha = "mcaptcha" + CfTurnstile = "cfturnstile" ) // settings @@ -240,7 +242,6 @@ var ( CustomEmojisMap map[string]string `ini:"-"` SearchRepoDescription bool UseServiceWorker bool - OnlyShowRelevantRepos bool Notification struct { MinTimeout time.Duration @@ -466,8 +467,7 @@ func getAppPath() (string, error) { } if err != nil { - // FIXME: Once we switch to go 1.19 use !errors.Is(err, exec.ErrDot) - if 
!strings.Contains(err.Error(), "cannot run executable found relative to current directory") { + if !errors.Is(err, exec.ErrDot) { return "", err } appPath, err = filepath.Abs(os.Args[0]) @@ -1123,7 +1123,6 @@ func loadFromConf(allowEmpty bool, extraConfig string) { UI.DefaultShowFullName = Cfg.Section("ui").Key("DEFAULT_SHOW_FULL_NAME").MustBool(false) UI.SearchRepoDescription = Cfg.Section("ui").Key("SEARCH_REPO_DESCRIPTION").MustBool(true) UI.UseServiceWorker = Cfg.Section("ui").Key("USE_SERVICE_WORKER").MustBool(false) - UI.OnlyShowRelevantRepos = Cfg.Section("ui").Key("ONLY_SHOW_RELEVANT_REPOS").MustBool(false) HasRobotsTxt, err = util.IsFile(path.Join(CustomPath, "robots.txt")) if err != nil { diff --git a/modules/turnstile/turnstile.go b/modules/turnstile/turnstile.go new file mode 100644 index 0000000000000..38d0233446608 --- /dev/null +++ b/modules/turnstile/turnstile.go @@ -0,0 +1,92 @@ +// Copyright 2023 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package turnstile + +import ( + "context" + "fmt" + "io" + "net/http" + "net/url" + "strings" + + "code.gitea.io/gitea/modules/json" + "code.gitea.io/gitea/modules/setting" +) + +// Response is the structure of JSON returned from API +type Response struct { + Success bool `json:"success"` + ChallengeTS string `json:"challenge_ts"` + Hostname string `json:"hostname"` + ErrorCodes []ErrorCode `json:"error-codes"` + Action string `json:"login"` + Cdata string `json:"cdata"` +} + +// Verify calls Cloudflare Turnstile API to verify token +func Verify(ctx context.Context, response string) (bool, error) { + // Cloudflare turnstile official access instruction address: https://developers.cloudflare.com/turnstile/get-started/server-side-validation/ + post := url.Values{ + "secret": {setting.Service.CfTurnstileSecret}, + "response": {response}, + } + // Basically a copy of http.PostForm, but with a context + req, err := http.NewRequestWithContext(ctx, http.MethodPost, + "https://challenges.cloudflare.com/turnstile/v0/siteverify", strings.NewReader(post.Encode())) + if err != nil { + return false, fmt.Errorf("Failed to create CAPTCHA request: %w", err) + } + req.Header.Set("Content-Type", "application/x-www-form-urlencoded") + + resp, err := http.DefaultClient.Do(req) + if err != nil { + return false, fmt.Errorf("Failed to send CAPTCHA response: %w", err) + } + defer resp.Body.Close() + body, err := io.ReadAll(resp.Body) + if err != nil { + return false, fmt.Errorf("Failed to read CAPTCHA response: %w", err) + } + + var jsonResponse Response + if err := json.Unmarshal(body, &jsonResponse); err != nil { + return false, fmt.Errorf("Failed to parse CAPTCHA response: %w", err) + } + + var respErr error + if len(jsonResponse.ErrorCodes) > 0 { + respErr = jsonResponse.ErrorCodes[0] + } + return jsonResponse.Success, respErr +} + +// ErrorCode is a reCaptcha error +type ErrorCode string + +// String fulfills the Stringer interface +func (e ErrorCode) String() string { + switch e { + case "missing-input-secret": + return "The secret parameter was not passed." + case "invalid-input-secret": + return "The secret parameter was invalid or did not exist." + case "missing-input-response": + return "The response parameter was not passed." + case "invalid-input-response": + return "The response parameter is invalid or has expired." + case "bad-request": + return "The request was rejected because it was malformed." + case "timeout-or-duplicate": + return "The response parameter has already been validated before." 
+	case "internal-error":
+		return "An internal error happened while validating the response. The request can be retried."
+	}
+	return string(e)
+}
+
+// Error fulfills the error interface
+func (e ErrorCode) Error() string {
+	return e.String()
+}
diff --git a/options/locale/locale_en-US.ini b/options/locale/locale_en-US.ini
index 8465660cc0756..bc2e8cb91cfba 100644
--- a/options/locale/locale_en-US.ini
+++ b/options/locale/locale_en-US.ini
@@ -518,6 +518,7 @@ organization_leave_success = You have successfully left the organization %s.
 invalid_ssh_key = Cannot verify your SSH key: %s
 invalid_gpg_key = Cannot verify your GPG key: %s
 invalid_ssh_principal = Invalid principal: %s
+must_use_public_key = The key you provided is a private key. Please do not upload your private key anywhere. Use your public key instead.
 unable_verify_ssh_key = "Cannot verify the SSH key; double-check it for mistakes."
 
 auth_failed = Authentication failed: %v
@@ -1305,7 +1306,8 @@ issues.filter_label_no_select = All labels
 issues.filter_milestone = Milestone
 issues.filter_milestone_no_select = All milestones
 issues.filter_project = Project
-issues.filter_project_no_select = All projects
+issues.filter_project_all = All projects
+issues.filter_project_none = No project
 issues.filter_assignee = Assignee
 issues.filter_assginee_no_select = All assignees
 issues.filter_poster = Author
@@ -3150,6 +3152,11 @@ versions.on = on
 versions.view_all = View all
 dependency.id = ID
 dependency.version = Version
+cargo.registry = Setup this registry in the Cargo configuration file (for example ~/.cargo/config.toml):
+cargo.install = To install the package using Cargo, run the following command:
+cargo.documentation = For more information on the Cargo registry, see the documentation.
+cargo.details.repository_site = Repository Site
+cargo.details.documentation_site = Documentation Site
 composer.registry = Setup this registry in your ~/.composer/config.json file:
 composer.install = To install the package using Composer, run the following command:
 composer.documentation = For more information on the Composer registry, see the documentation.
@@ -3226,6 +3233,15 @@ settings.delete.description = Deleting a package is permanent and cannot be undo
 settings.delete.notice = You are about to delete %s (%s). This operation is irreversible, are you sure?
 settings.delete.success = The package has been deleted.
 settings.delete.error = Failed to delete the package.
+owner.settings.cargo.title = Cargo Registry Index
+owner.settings.cargo.initialize = Initialize Index
+owner.settings.cargo.initialize.description = To use the Cargo registry, a special index git repository is needed. Here you can (re)create it with the required config.
+owner.settings.cargo.initialize.error = Failed to initialize Cargo index: %v
+owner.settings.cargo.initialize.success = The Cargo index was successfully created.
+owner.settings.cargo.rebuild = Rebuild Index
+owner.settings.cargo.rebuild.description = If the index is out of sync with the stored Cargo packages, you can rebuild it here.
+owner.settings.cargo.rebuild.error = Failed to rebuild Cargo index: %v
+owner.settings.cargo.rebuild.success = The Cargo index was successfully rebuilt.
owner.settings.cleanuprules.title = Manage Cleanup Rules owner.settings.cleanuprules.add = Add Cleanup Rule owner.settings.cleanuprules.edit = Edit Cleanup Rule diff --git a/public/img/svg/gitea-cargo.svg b/public/img/svg/gitea-cargo.svg new file mode 100644 index 0000000000000..91d53941cad9f --- /dev/null +++ b/public/img/svg/gitea-cargo.svg @@ -0,0 +1 @@ + \ No newline at end of file diff --git a/routers/api/packages/api.go b/routers/api/packages/api.go index 7a07fea815e87..8ec9ae9bf70a8 100644 --- a/routers/api/packages/api.go +++ b/routers/api/packages/api.go @@ -14,6 +14,7 @@ import ( "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/web" + "code.gitea.io/gitea/routers/api/packages/cargo" "code.gitea.io/gitea/routers/api/packages/composer" "code.gitea.io/gitea/routers/api/packages/conan" "code.gitea.io/gitea/routers/api/packages/conda" @@ -71,6 +72,20 @@ func CommonRoutes(ctx gocontext.Context) *web.Route { }) r.Group("/{username}", func() { + r.Group("/cargo", func() { + r.Group("/api/v1/crates", func() { + r.Get("", cargo.SearchPackages) + r.Put("/new", reqPackageAccess(perm.AccessModeWrite), cargo.UploadPackage) + r.Group("/{package}", func() { + r.Group("/{version}", func() { + r.Get("/download", cargo.DownloadPackageFile) + r.Delete("/yank", reqPackageAccess(perm.AccessModeWrite), cargo.YankPackage) + r.Put("/unyank", reqPackageAccess(perm.AccessModeWrite), cargo.UnyankPackage) + }) + r.Get("/owners", cargo.ListOwners) + }) + }) + }, reqPackageAccess(perm.AccessModeRead)) r.Group("/composer", func() { r.Get("/packages.json", composer.ServiceIndex) r.Get("/search.json", composer.SearchPackages) diff --git a/routers/api/packages/cargo/cargo.go b/routers/api/packages/cargo/cargo.go new file mode 100644 index 0000000000000..e0bf5da13adb0 --- /dev/null +++ b/routers/api/packages/cargo/cargo.go @@ -0,0 +1,281 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
+// SPDX-License-Identifier: MIT + +package cargo + +import ( + "fmt" + "net/http" + "strconv" + "strings" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + cargo_module "code.gitea.io/gitea/modules/packages/cargo" + "code.gitea.io/gitea/modules/util" + "code.gitea.io/gitea/routers/api/packages/helper" + "code.gitea.io/gitea/services/convert" + packages_service "code.gitea.io/gitea/services/packages" + cargo_service "code.gitea.io/gitea/services/packages/cargo" +) + +// https://doc.rust-lang.org/cargo/reference/registries.html#web-api +type StatusResponse struct { + OK bool `json:"ok"` + Errors []StatusMessage `json:"errors,omitempty"` +} + +type StatusMessage struct { + Message string `json:"detail"` +} + +func apiError(ctx *context.Context, status int, obj interface{}) { + helper.LogAndProcessError(ctx, status, obj, func(message string) { + ctx.JSON(status, StatusResponse{ + OK: false, + Errors: []StatusMessage{ + { + Message: message, + }, + }, + }) + }) +} + +type SearchResult struct { + Crates []*SearchResultCrate `json:"crates"` + Meta SearchResultMeta `json:"meta"` +} + +type SearchResultCrate struct { + Name string `json:"name"` + LatestVersion string `json:"max_version"` + Description string `json:"description"` +} + +type SearchResultMeta struct { + Total int64 `json:"total"` +} + +// https://doc.rust-lang.org/cargo/reference/registries.html#search +func SearchPackages(ctx *context.Context) { + page := ctx.FormInt("page") + if page < 1 { + page = 1 + } + perPage := ctx.FormInt("per_page") + paginator := db.ListOptions{ + Page: page, + PageSize: convert.ToCorrectPageSize(perPage), + } + + pvs, total, err := packages_model.SearchLatestVersions( + ctx, + &packages_model.PackageSearchOptions{ + OwnerID: ctx.Package.Owner.ID, + Type: packages_model.TypeCargo, + Name: packages_model.SearchValue{Value: ctx.FormTrim("q")}, + IsInternal: util.OptionalBoolFalse, + Paginator: &paginator, + }, + ) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + crates := make([]*SearchResultCrate, 0, len(pvs)) + for _, pd := range pds { + crates = append(crates, &SearchResultCrate{ + Name: pd.Package.Name, + LatestVersion: pd.Version.Version, + Description: pd.Metadata.(*cargo_module.Metadata).Description, + }) + } + + ctx.JSON(http.StatusOK, SearchResult{ + Crates: crates, + Meta: SearchResultMeta{ + Total: total, + }, + }) +} + +type Owners struct { + Users []OwnerUser `json:"users"` +} + +type OwnerUser struct { + ID int64 `json:"id"` + Login string `json:"login"` + Name string `json:"name"` +} + +// https://doc.rust-lang.org/cargo/reference/registries.html#owners-list +func ListOwners(ctx *context.Context) { + ctx.JSON(http.StatusOK, Owners{ + Users: []OwnerUser{ + { + ID: ctx.Package.Owner.ID, + Login: ctx.Package.Owner.Name, + Name: ctx.Package.Owner.DisplayName(), + }, + }, + }) +} + +// DownloadPackageFile serves the content of a package +func DownloadPackageFile(ctx *context.Context) { + s, pf, err := packages_service.GetFileStreamByPackageNameAndVersion( + ctx, + &packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeCargo, + Name: ctx.Params("package"), + Version: ctx.Params("version"), + }, + 
&packages_service.PackageFileInfo{ + Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", ctx.Params("package"), ctx.Params("version"))), + }, + ) + if err != nil { + if err == packages_model.ErrPackageNotExist || err == packages_model.ErrPackageFileNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer s.Close() + + ctx.ServeContent(s, &context.ServeHeaderOptions{ + Filename: pf.Name, + LastModified: pf.CreatedUnix.AsLocalTime(), + }) +} + +// https://doc.rust-lang.org/cargo/reference/registries.html#publish +func UploadPackage(ctx *context.Context) { + defer ctx.Req.Body.Close() + + cp, err := cargo_module.ParsePackage(ctx.Req.Body) + if err != nil { + apiError(ctx, http.StatusBadRequest, err) + return + } + + buf, err := packages_module.CreateHashedBufferFromReader(cp.Content, 32*1024*1024) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + defer buf.Close() + + if buf.Size() != cp.ContentSize { + apiError(ctx, http.StatusBadRequest, "invalid content size") + return + } + + pv, _, err := packages_service.CreatePackageAndAddFile( + &packages_service.PackageCreationInfo{ + PackageInfo: packages_service.PackageInfo{ + Owner: ctx.Package.Owner, + PackageType: packages_model.TypeCargo, + Name: cp.Name, + Version: cp.Version, + }, + SemverCompatible: true, + Creator: ctx.Doer, + Metadata: cp.Metadata, + VersionProperties: map[string]string{ + cargo_module.PropertyYanked: strconv.FormatBool(false), + }, + }, + &packages_service.PackageFileCreationInfo{ + PackageFileInfo: packages_service.PackageFileInfo{ + Filename: strings.ToLower(fmt.Sprintf("%s-%s.crate", cp.Name, cp.Version)), + }, + Creator: ctx.Doer, + Data: buf, + IsLead: true, + }, + ) + if err != nil { + switch err { + case packages_model.ErrDuplicatePackageVersion: + apiError(ctx, http.StatusConflict, err) + case packages_service.ErrQuotaTotalCount, packages_service.ErrQuotaTypeSize, packages_service.ErrQuotaTotalSize: + apiError(ctx, http.StatusForbidden, err) + default: + apiError(ctx, http.StatusInternalServerError, err) + } + return + } + + if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil { + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + log.Error("Rollback creation of package version: %v", err) + } + + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.JSON(http.StatusOK, StatusResponse{OK: true}) +} + +// https://doc.rust-lang.org/cargo/reference/registries.html#yank +func YankPackage(ctx *context.Context) { + yankPackage(ctx, true) +} + +// https://doc.rust-lang.org/cargo/reference/registries.html#unyank +func UnyankPackage(ctx *context.Context) { + yankPackage(ctx, false) +} + +func yankPackage(ctx *context.Context, yank bool) { + pv, err := packages_model.GetVersionByNameAndVersion(ctx, ctx.Package.Owner.ID, packages_model.TypeCargo, ctx.Params("package"), ctx.Params("version")) + if err != nil { + if err == packages_model.ErrPackageNotExist { + apiError(ctx, http.StatusNotFound, err) + return + } + apiError(ctx, http.StatusInternalServerError, err) + return + } + + pps, err := packages_model.GetPropertiesByName(ctx, packages_model.PropertyTypeVersion, pv.ID, cargo_module.PropertyYanked) + if err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + if len(pps) == 0 { + apiError(ctx, http.StatusInternalServerError, "Property not found") + return + } + + pp := pps[0] 
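
UploadPackage above hands ctx.Req.Body to cargo_module.ParsePackage. The Cargo registries web API referenced in the comments frames the publish body as a 32-bit little-endian JSON length, the metadata JSON, a 32-bit little-endian crate length, then the .crate bytes. The sketch below only illustrates that framing; it is not Gitea's parser, and the helper name is made up.

```go
package main

import (
	"encoding/binary"
	"fmt"
	"io"
	"strings"
)

// readFramed reads one length-prefixed JSON block and one length-prefixed
// crate block, as described in the Cargo publish wire format.
func readFramed(r io.Reader) (meta, crate []byte, err error) {
	readBlock := func() ([]byte, error) {
		var size uint32
		if err := binary.Read(r, binary.LittleEndian, &size); err != nil {
			return nil, err
		}
		buf := make([]byte, size)
		if _, err := io.ReadFull(r, buf); err != nil {
			return nil, err
		}
		return buf, nil
	}
	if meta, err = readBlock(); err != nil {
		return nil, nil, err
	}
	if crate, err = readBlock(); err != nil {
		return nil, nil, err
	}
	return meta, crate, nil
}

func main() {
	// Build a tiny fake body: 4-byte length + "{}" + 4-byte length + "crate".
	var b strings.Builder
	_ = binary.Write(&b, binary.LittleEndian, uint32(2))
	b.WriteString("{}")
	_ = binary.Write(&b, binary.LittleEndian, uint32(5))
	b.WriteString("crate")

	meta, crate, err := readFramed(strings.NewReader(b.String()))
	if err != nil {
		panic(err)
	}
	fmt.Printf("metadata: %s, crate: %d bytes\n", meta, len(crate))
}
```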
+ pp.Value = strconv.FormatBool(yank) + + if err := packages_model.UpdateProperty(ctx, pp); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + if err := cargo_service.AddOrUpdatePackageIndex(ctx, ctx.Doer, ctx.Package.Owner, pv.PackageID); err != nil { + apiError(ctx, http.StatusInternalServerError, err) + return + } + + ctx.JSON(http.StatusOK, StatusResponse{OK: true}) +} diff --git a/routers/api/v1/packages/package.go b/routers/api/v1/packages/package.go index 5ffefc4862c1e..2a20d66d1e07c 100644 --- a/routers/api/v1/packages/package.go +++ b/routers/api/v1/packages/package.go @@ -40,7 +40,7 @@ func ListPackages(ctx *context.APIContext) { // in: query // description: package type filter // type: string - // enum: [composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant] + // enum: [cargo, composer, conan, conda, container, generic, helm, maven, npm, nuget, pub, pypi, rubygems, vagrant] // - name: q // in: query // description: name filter diff --git a/routers/web/explore/repo.go b/routers/web/explore/repo.go index 5271e39bbc91b..e9684dd2865d4 100644 --- a/routers/web/explore/repo.go +++ b/routers/web/explore/repo.go @@ -17,7 +17,8 @@ import ( const ( // tplExploreRepos explore repositories page template - tplExploreRepos base.TplName = "explore/repos" + tplExploreRepos base.TplName = "explore/repos" + relevantReposOnlyParam string = "no_filter" ) // RepoSearchOptions when calling search repositories @@ -81,13 +82,11 @@ func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { default: ctx.Data["SortType"] = "recentupdate" orderBy = db.SearchOrderByRecentUpdated - onlyShowRelevant = setting.UI.OnlyShowRelevantRepos && !ctx.FormBool("no_filter") } + onlyShowRelevant = !ctx.FormBool(relevantReposOnlyParam) + keyword := ctx.FormTrim("q") - if keyword != "" { - onlyShowRelevant = false - } ctx.Data["OnlyShowRelevant"] = onlyShowRelevant @@ -139,7 +138,7 @@ func RenderRepoSearch(ctx *context.Context, opts *RepoSearchOptions) { pager.SetDefaultParams(ctx) pager.AddParam(ctx, "topic", "TopicOnly") pager.AddParam(ctx, "language", "Language") - pager.AddParamString("no_filter", ctx.FormString("no_filter")) + pager.AddParamString(relevantReposOnlyParam, ctx.FormString(relevantReposOnlyParam)) ctx.Data["Page"] = pager ctx.HTML(http.StatusOK, opts.TplName) diff --git a/routers/web/org/setting_packages.go b/routers/web/org/setting_packages.go index 80135ca2d0a01..21d25bd90a6aa 100644 --- a/routers/web/org/setting_packages.go +++ b/routers/web/org/setting_packages.go @@ -84,3 +84,23 @@ func PackagesRulePreview(ctx *context.Context) { ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview) } + +func InitializeCargoIndex(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["PageIsOrgSettings"] = true + ctx.Data["PageIsSettingsPackages"] = true + + shared.InitializeCargoIndex(ctx, ctx.ContextUser) + + ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name)) +} + +func RebuildCargoIndex(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["PageIsOrgSettings"] = true + ctx.Data["PageIsSettingsPackages"] = true + + shared.RebuildCargoIndex(ctx, ctx.ContextUser) + + ctx.Redirect(fmt.Sprintf("%s/org/%s/settings/packages", setting.AppSubURL, ctx.ContextUser.Name)) +} diff --git a/routers/web/repo/blame.go b/routers/web/repo/blame.go index 50bfa9d3bd289..def7cfcadbe6c 100644 --- a/routers/web/repo/blame.go +++ 
b/routers/web/repo/blame.go @@ -217,7 +217,7 @@ func renderBlame(ctx *context.Context, blameParts []git.BlamePart, commitNames m filename2attribute2info, err := ctx.Repo.GitRepo.CheckAttribute(git.CheckAttributeOpts{ CachedOnly: true, - Attributes: []git.CmdArg{"linguist-language", "gitlab-language"}, + Attributes: []string{"linguist-language", "gitlab-language"}, Filenames: []string{ctx.Repo.TreePath}, IndexFile: indexFilename, WorkTree: worktree, diff --git a/routers/web/repo/compare.go b/routers/web/repo/compare.go index 35f923d561ea5..c4b8c814e6756 100644 --- a/routers/web/repo/compare.go +++ b/routers/web/repo/compare.go @@ -560,7 +560,7 @@ func ParseCompareInfo(ctx *context.Context) *CompareInfo { func PrepareCompareDiff( ctx *context.Context, ci *CompareInfo, - whitespaceBehavior git.CmdArg, + whitespaceBehavior git.TrustedCmdArgs, ) bool { var ( repo = ctx.Repo.Repository diff --git a/routers/web/repo/http.go b/routers/web/repo/http.go index 2c91ed6178e05..89c86e764e3cf 100644 --- a/routers/web/repo/http.go +++ b/routers/web/repo/http.go @@ -498,7 +498,8 @@ func serviceRPC(ctx gocontext.Context, h serviceHandler, service string) { } var stderr bytes.Buffer - cmd := git.NewCommand(h.r.Context(), git.CmdArgCheck(service), "--stateless-rpc").AddDynamicArguments(h.dir) + // the service is generated by ourselves, so it's safe to trust it + cmd := git.NewCommand(h.r.Context(), git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc").AddDynamicArguments(h.dir) cmd.SetDescription(fmt.Sprintf("%s %s %s [repo_path: %s]", git.GitExecutable, service, "--stateless-rpc", h.dir)) if err := cmd.Run(&git.RunOpts{ Dir: h.dir, @@ -570,7 +571,8 @@ func GetInfoRefs(ctx *context.Context) { } h.environ = append(os.Environ(), h.environ...) - refs, _, err := git.NewCommand(ctx, git.CmdArgCheck(service), "--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir}) + // the service is generated by ourselves, so we can trust it + refs, _, err := git.NewCommand(ctx, git.ToTrustedCmdArgs([]string{service})...).AddArguments("--stateless-rpc", "--advertise-refs", ".").RunStdBytes(&git.RunOpts{Env: h.environ, Dir: h.dir}) if err != nil { log.Error(fmt.Sprintf("%v - %s", err, string(refs))) } diff --git a/routers/web/repo/issue.go b/routers/web/repo/issue.go index 5bff9e67f340a..2193da5110efc 100644 --- a/routers/web/repo/issue.go +++ b/routers/web/repo/issue.go @@ -363,16 +363,10 @@ func issues(ctx *context.Context, milestoneID, projectID int64, isPullOption uti return 0 } - projects, _, err := project_model.FindProjects(ctx, project_model.SearchOptions{ - RepoID: repo.ID, - Type: project_model.TypeRepository, - IsClosed: util.OptionalBoolOf(isShowClosed), - }) - if err != nil { - ctx.ServerError("FindProjects", err) + retrieveProjects(ctx, repo) + if ctx.Written() { return } - ctx.Data["Projects"] = projects ctx.Data["IssueStats"] = issueStats ctx.Data["SelLabelIDs"] = labelIDs diff --git a/routers/web/repo/lfs.go b/routers/web/repo/lfs.go index 41d5edcdd8ca0..869a69c377219 100644 --- a/routers/web/repo/lfs.go +++ b/routers/web/repo/lfs.go @@ -146,7 +146,7 @@ func LFSLocks(ctx *context.Context) { } name2attribute2info, err := gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []git.CmdArg{"lockable"}, + Attributes: []string{"lockable"}, Filenames: filenames, CachedOnly: true, }) diff --git a/routers/web/repo/pull.go b/routers/web/repo/pull.go index 71bc98d13d07c..11d336d4ecaae 100644 --- a/routers/web/repo/pull.go +++ b/routers/web/repo/pull.go @@ 
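
Several hunks above replace git.CmdArg values with either plain strings (the CheckAttribute attribute names) or git.TrustedCmdArgs, route anything user-influenced through AddDynamicArguments, and convert strings Gitea generated itself (such as the RPC service name) with git.ToTrustedCmdArgs. A sketch of that split, assuming only the signatures visible in this diff; the repository path, ref and flags are placeholders.

```go
package main

import (
	"context"
	"fmt"

	"code.gitea.io/gitea/modules/git"
)

func latestCommit(ctx context.Context, repoPath, userSuppliedRef string, extraFlags []string) (string, error) {
	cmd := git.NewCommand(ctx, "log", "-1", "--format=%H"). // fixed flags: trusted literals
		// flags Gitea assembled itself (e.g. from config) are converted wholesale
		AddArguments(git.ToTrustedCmdArgs(extraFlags)...).
		// anything derived from user input stays a dynamic argument
		AddDynamicArguments(userSuppliedRef)
	stdout, _, err := cmd.RunStdString(&git.RunOpts{Dir: repoPath})
	return stdout, err
}

func main() {
	sha, err := latestCommit(context.Background(), ".", "main", []string{"--no-merges"})
	if err != nil {
		panic(err)
	}
	fmt.Print(sha)
}
```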
-926,59 +926,54 @@ func MergePullRequest(ctx *context.Context) { pr := issue.PullRequest pr.Issue = issue pr.Issue.Repo = ctx.Repo.Repository - manuallMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged + manualMerge := repo_model.MergeStyle(form.Do) == repo_model.MergeStyleManuallyMerged forceMerge := form.ForceMerge != nil && *form.ForceMerge // start with merging by checking - if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manuallMerge, forceMerge); err != nil { - if errors.Is(err, pull_service.ErrIsClosed) { + if err := pull_service.CheckPullMergable(ctx, ctx.Doer, &ctx.Repo.Permission, pr, manualMerge, forceMerge); err != nil { + switch { + case errors.Is(err, pull_service.ErrIsClosed): if issue.IsPull { ctx.Flash.Error(ctx.Tr("repo.pulls.is_closed")) - ctx.Redirect(issue.Link()) } else { ctx.Flash.Error(ctx.Tr("repo.issues.closed_title")) - ctx.Redirect(issue.Link()) } - } else if errors.Is(err, pull_service.ErrUserNotAllowedToMerge) { + case errors.Is(err, pull_service.ErrUserNotAllowedToMerge): ctx.Flash.Error(ctx.Tr("repo.pulls.update_not_allowed")) - ctx.Redirect(issue.Link()) - } else if errors.Is(err, pull_service.ErrHasMerged) { + case errors.Is(err, pull_service.ErrHasMerged): ctx.Flash.Error(ctx.Tr("repo.pulls.has_merged")) - ctx.Redirect(issue.Link()) - } else if errors.Is(err, pull_service.ErrIsWorkInProgress) { + case errors.Is(err, pull_service.ErrIsWorkInProgress): ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_wip")) - ctx.Redirect(issue.Link()) - } else if errors.Is(err, pull_service.ErrNotMergableState) { + case errors.Is(err, pull_service.ErrNotMergableState): ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) - ctx.Redirect(issue.Link()) - } else if models.IsErrDisallowedToMerge(err) { + case models.IsErrDisallowedToMerge(err): ctx.Flash.Error(ctx.Tr("repo.pulls.no_merge_not_ready")) - ctx.Redirect(issue.Link()) - } else if asymkey_service.IsErrWontSign(err) { - ctx.Flash.Error(err.Error()) // has not translation ... - ctx.Redirect(issue.Link()) - } else if errors.Is(err, pull_service.ErrDependenciesLeft) { + case asymkey_service.IsErrWontSign(err): + ctx.Flash.Error(err.Error()) // has no translation ... 
+ case errors.Is(err, pull_service.ErrDependenciesLeft): ctx.Flash.Error(ctx.Tr("repo.issues.dependency.pr_close_blocked")) - ctx.Redirect(issue.Link()) - } else { + default: ctx.ServerError("WebCheck", err) + return } + + ctx.Redirect(issue.Link()) return } // handle manually-merged mark - if manuallMerge { + if manualMerge { if err := pull_service.MergedManually(pr, ctx.Doer, ctx.Repo.GitRepo, form.MergeCommitID); err != nil { - if models.IsErrInvalidMergeStyle(err) { + switch { + + case models.IsErrInvalidMergeStyle(err): ctx.Flash.Error(ctx.Tr("repo.pulls.invalid_merge_option")) - ctx.Redirect(issue.Link()) - } else if strings.Contains(err.Error(), "Wrong commit ID") { + case strings.Contains(err.Error(), "Wrong commit ID"): ctx.Flash.Error(ctx.Tr("repo.pulls.wrong_commit_id")) - ctx.Redirect(issue.Link()) - } else { + default: ctx.ServerError("MergedManually", err) + return } - return } ctx.Redirect(issue.Link()) diff --git a/routers/web/repo/setting.go b/routers/web/repo/setting.go index da52957548be8..2cc263e5bbfd3 100644 --- a/routers/web/repo/setting.go +++ b/routers/web/repo/setting.go @@ -1158,6 +1158,10 @@ func DeployKeysPost(ctx *context.Context) { ctx.Flash.Info(ctx.Tr("settings.ssh_disabled")) } else if asymkey_model.IsErrKeyUnableVerify(err) { ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key")) + } else if err == asymkey_model.ErrKeyIsPrivate { + ctx.Data["HasError"] = true + ctx.Data["Err_Content"] = true + ctx.Flash.Error(ctx.Tr("form.must_use_public_key")) } else { ctx.Data["HasError"] = true ctx.Data["Err_Content"] = true diff --git a/routers/web/repo/view.go b/routers/web/repo/view.go index 769e4e895cd99..f314902374c38 100644 --- a/routers/web/repo/view.go +++ b/routers/web/repo/view.go @@ -509,7 +509,7 @@ func renderFile(ctx *context.Context, entry *git.TreeEntry, treeLink, rawLink st filename2attribute2info, err := ctx.Repo.GitRepo.CheckAttribute(git.CheckAttributeOpts{ CachedOnly: true, - Attributes: []git.CmdArg{"linguist-language", "gitlab-language"}, + Attributes: []string{"linguist-language", "gitlab-language"}, Filenames: []string{ctx.Repo.TreePath}, IndexFile: indexFilename, WorkTree: worktree, diff --git a/routers/web/shared/packages/packages.go b/routers/web/shared/packages/packages.go index b9aa40bdd22bc..30c25374d1b45 100644 --- a/routers/web/shared/packages/packages.go +++ b/routers/web/shared/packages/packages.go @@ -13,9 +13,11 @@ import ( user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/base" "code.gitea.io/gitea/modules/context" + "code.gitea.io/gitea/modules/log" "code.gitea.io/gitea/modules/util" "code.gitea.io/gitea/modules/web" "code.gitea.io/gitea/services/forms" + cargo_service "code.gitea.io/gitea/services/packages/cargo" container_service "code.gitea.io/gitea/services/packages/container" ) @@ -223,3 +225,23 @@ func getCleanupRuleByContext(ctx *context.Context, owner *user_model.User) *pack return nil } + +func InitializeCargoIndex(ctx *context.Context, owner *user_model.User) { + err := cargo_service.InitializeIndexRepository(ctx, owner, owner) + if err != nil { + log.Error("InitializeIndexRepository failed: %v", err) + ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.initialize.error", err)) + } else { + ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.initialize.success")) + } +} + +func RebuildCargoIndex(ctx *context.Context, owner *user_model.User) { + err := cargo_service.RebuildIndex(ctx, owner, owner) + if err != nil { + log.Error("RebuildIndex failed: %v", err) + 
ctx.Flash.Error(ctx.Tr("packages.owner.settings.cargo.rebuild.error", err)) + } else { + ctx.Flash.Success(ctx.Tr("packages.owner.settings.cargo.rebuild.success")) + } +} diff --git a/routers/web/user/setting/keys.go b/routers/web/user/setting/keys.go index 0ecc39ecd17ed..6debf95bbce06 100644 --- a/routers/web/user/setting/keys.go +++ b/routers/web/user/setting/keys.go @@ -159,6 +159,8 @@ func KeysPost(ctx *context.Context) { ctx.Flash.Info(ctx.Tr("settings.ssh_disabled")) } else if asymkey_model.IsErrKeyUnableVerify(err) { ctx.Flash.Info(ctx.Tr("form.unable_verify_ssh_key")) + } else if err == asymkey_model.ErrKeyIsPrivate { + ctx.Flash.Error(ctx.Tr("form.must_use_public_key")) } else { ctx.Flash.Error(ctx.Tr("form.invalid_ssh_key", err.Error())) } diff --git a/routers/web/user/setting/packages.go b/routers/web/user/setting/packages.go index f6f7195adf1f2..b3f8a3e41dfb3 100644 --- a/routers/web/user/setting/packages.go +++ b/routers/web/user/setting/packages.go @@ -77,3 +77,21 @@ func PackagesRulePreview(ctx *context.Context) { ctx.HTML(http.StatusOK, tplSettingsPackagesRulePreview) } + +func InitializeCargoIndex(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["PageIsSettingsPackages"] = true + + shared.InitializeCargoIndex(ctx, ctx.Doer) + + ctx.Redirect(setting.AppSubURL + "/user/settings/packages") +} + +func RebuildCargoIndex(ctx *context.Context) { + ctx.Data["Title"] = ctx.Tr("packages.title") + ctx.Data["PageIsSettingsPackages"] = true + + shared.RebuildCargoIndex(ctx, ctx.Doer) + + ctx.Redirect(setting.AppSubURL + "/user/settings/packages") +} diff --git a/routers/web/web.go b/routers/web/web.go index b7128fc3a9fcc..a024c0ac37ebd 100644 --- a/routers/web/web.go +++ b/routers/web/web.go @@ -468,6 +468,10 @@ func RegisterRoutes(m *web.Route) { m.Get("/preview", user_setting.PackagesRulePreview) }) }) + m.Group("/cargo", func() { + m.Post("/initialize", user_setting.InitializeCargoIndex) + m.Post("/rebuild", user_setting.RebuildCargoIndex) + }) }, packagesEnabled) m.Group("/secrets", func() { m.Get("", user_setting.Secrets) @@ -818,6 +822,10 @@ func RegisterRoutes(m *web.Route) { m.Get("/preview", org.PackagesRulePreview) }) }) + m.Group("/cargo", func() { + m.Post("/initialize", org.InitializeCargoIndex) + m.Post("/rebuild", org.RebuildCargoIndex) + }) }, packagesEnabled) }, func(ctx *context.Context) { ctx.Data["EnableOAuth2"] = setting.OAuth2.Enable diff --git a/services/auth/source/ldap/source_search.go b/services/auth/source/ldap/source_search.go index 68ebba391769d..16f13029f9259 100644 --- a/services/auth/source/ldap/source_search.go +++ b/services/auth/source/ldap/source_search.go @@ -196,22 +196,39 @@ func checkRestricted(l *ldap.Conn, ls *Source, userDN string) bool { } // List all group memberships of a user -func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string) []string { +func (source *Source) listLdapGroupMemberships(l *ldap.Conn, uid string, applyGroupFilter bool) []string { var ldapGroups []string - groupFilter := fmt.Sprintf("(%s=%s)", source.GroupMemberUID, ldap.EscapeFilter(uid)) + var searchFilter string + + groupFilter, ok := source.sanitizedGroupFilter(source.GroupFilter) + if !ok { + return ldapGroups + } + + groupDN, ok := source.sanitizedGroupDN(source.GroupDN) + if !ok { + return ldapGroups + } + + if applyGroupFilter { + searchFilter = fmt.Sprintf("(&(%s)(%s=%s))", groupFilter, source.GroupMemberUID, ldap.EscapeFilter(uid)) + } else { + searchFilter = fmt.Sprintf("(%s=%s)", source.GroupMemberUID, 
ldap.EscapeFilter(uid)) + } + result, err := l.Search(ldap.NewSearchRequest( - source.GroupDN, + groupDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, - groupFilter, + searchFilter, []string{}, nil, )) if err != nil { - log.Error("Failed group search using filter[%s]: %v", groupFilter, err) + log.Error("Failed group search in LDAP with filter [%s]: %v", searchFilter, err) return ldapGroups } @@ -238,9 +255,7 @@ func (source *Source) mapLdapGroupsToTeams() map[string]map[string][]string { } // getMappedMemberships : returns the organizations and teams to modify the users membership -func (source *Source) getMappedMemberships(l *ldap.Conn, uid string) (map[string][]string, map[string][]string) { - // get all LDAP group memberships for user - usersLdapGroups := source.listLdapGroupMemberships(l, uid) +func (source *Source) getMappedMemberships(usersLdapGroups []string, uid string) (map[string][]string, map[string][]string) { // unmarshall LDAP group team map from configs ldapGroupsToTeams := source.mapLdapGroupsToTeams() membershipsToAdd := map[string][]string{} @@ -260,6 +275,14 @@ func (source *Source) getMappedMemberships(l *ldap.Conn, uid string) (map[string return membershipsToAdd, membershipsToRemove } +func (source *Source) getUserAttributeListedInGroup(entry *ldap.Entry) string { + if strings.ToLower(source.UserUID) == "dn" { + return entry.DN + } + + return entry.GetAttributeValue(source.UserUID) +} + // SearchEntry : search an LDAP source if an entry (name, passwd) is valid and in the specific filter func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchResult { // See https://tools.ietf.org/search/rfc4513#section-5.1.2 @@ -375,58 +398,30 @@ func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchR firstname := sr.Entries[0].GetAttributeValue(source.AttributeName) surname := sr.Entries[0].GetAttributeValue(source.AttributeSurname) mail := sr.Entries[0].GetAttributeValue(source.AttributeMail) - uid := sr.Entries[0].GetAttributeValue(source.UserUID) - if source.UserUID == "dn" || source.UserUID == "DN" { - uid = sr.Entries[0].DN - } - // Check group membership - if source.GroupsEnabled && source.GroupFilter != "" { - groupFilter, ok := source.sanitizedGroupFilter(source.GroupFilter) - if !ok { - return nil - } - groupDN, ok := source.sanitizedGroupDN(source.GroupDN) - if !ok { - return nil - } + teamsToAdd := make(map[string][]string) + teamsToRemove := make(map[string][]string) - log.Trace("Fetching groups '%v' with filter '%s' and base '%s'", source.GroupMemberUID, groupFilter, groupDN) - groupSearch := ldap.NewSearchRequest( - groupDN, ldap.ScopeWholeSubtree, ldap.NeverDerefAliases, 0, 0, false, groupFilter, - []string{source.GroupMemberUID}, - nil) + // Check group membership + if source.GroupsEnabled { + userAttributeListedInGroup := source.getUserAttributeListedInGroup(sr.Entries[0]) + usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) - srg, err := l.Search(groupSearch) - if err != nil { - log.Error("LDAP group search failed: %v", err) - return nil - } else if len(srg.Entries) < 1 { - log.Error("LDAP group search failed: 0 entries") + if source.GroupFilter != "" && len(usersLdapGroups) == 0 { return nil } - isMember := false - Entries: - for _, group := range srg.Entries { - for _, member := range group.GetAttributeValues(source.GroupMemberUID) { - if (source.UserUID == "dn" && member == sr.Entries[0].DN) || member == uid { - isMember = true - break Entries - } - } - } - - if 
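
listLdapGroupMemberships above now builds one of two search filters depending on applyGroupFilter: the group filter is ANDed in when the result is used to gate sign-in, and omitted when the call only feeds the team map. With assumed example values the two shapes look like this:

```go
package main

import "fmt"

func main() {
	groupFilter := "cn=gitea-users" // placeholder for source.GroupFilter (already sanitized)
	memberUID := "memberUid"        // placeholder for source.GroupMemberUID
	uid := "jdoe"                   // escaped user value

	withGroupFilter := fmt.Sprintf("(&(%s)(%s=%s))", groupFilter, memberUID, uid)
	withoutGroupFilter := fmt.Sprintf("(%s=%s)", memberUID, uid)

	fmt.Println(withGroupFilter)    // (&(cn=gitea-users)(memberUid=jdoe))
	fmt.Println(withoutGroupFilter) // (memberUid=jdoe)
}
```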
!isMember { - log.Error("LDAP group membership test failed") - return nil + if source.GroupTeamMap != "" || source.GroupTeamMapRemoval { + teamsToAdd, teamsToRemove = source.getMappedMemberships(usersLdapGroups, userAttributeListedInGroup) } } if isAttributeSSHPublicKeySet { sshPublicKey = sr.Entries[0].GetAttributeValues(source.AttributeSSHPublicKey) } + isAdmin := checkAdmin(l, source, userDN) + var isRestricted bool if !isAdmin { isRestricted = checkRestricted(l, source, userDN) @@ -436,12 +431,6 @@ func (source *Source) SearchEntry(name, passwd string, directBind bool) *SearchR Avatar = sr.Entries[0].GetRawAttributeValue(source.AttributeAvatar) } - teamsToAdd := make(map[string][]string) - teamsToRemove := make(map[string][]string) - if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - teamsToAdd, teamsToRemove = source.getMappedMemberships(l, uid) - } - if !directBind && source.AttributesInBind { // binds user (checking password) after looking-up attributes in BindDN context err = bindUser(l, userDN, passwd) @@ -520,19 +509,29 @@ func (source *Source) SearchEntries() ([]*SearchResult, error) { return nil, err } - result := make([]*SearchResult, len(sr.Entries)) + result := make([]*SearchResult, 0, len(sr.Entries)) - for i, v := range sr.Entries { + for _, v := range sr.Entries { teamsToAdd := make(map[string][]string) teamsToRemove := make(map[string][]string) - if source.GroupsEnabled && (source.GroupTeamMap != "" || source.GroupTeamMapRemoval) { - userAttributeListedInGroup := v.GetAttributeValue(source.UserUID) - if source.UserUID == "dn" || source.UserUID == "DN" { - userAttributeListedInGroup = v.DN + + if source.GroupsEnabled { + userAttributeListedInGroup := source.getUserAttributeListedInGroup(v) + + if source.GroupFilter != "" { + usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, true) + if len(usersLdapGroups) == 0 { + continue + } + } + + if source.GroupTeamMap != "" || source.GroupTeamMapRemoval { + usersLdapGroups := source.listLdapGroupMemberships(l, userAttributeListedInGroup, false) + teamsToAdd, teamsToRemove = source.getMappedMemberships(usersLdapGroups, userAttributeListedInGroup) } - teamsToAdd, teamsToRemove = source.getMappedMemberships(l, userAttributeListedInGroup) } - result[i] = &SearchResult{ + + user := &SearchResult{ Username: v.GetAttributeValue(source.AttributeUsername), Name: v.GetAttributeValue(source.AttributeName), Surname: v.GetAttributeValue(source.AttributeSurname), @@ -541,16 +540,22 @@ func (source *Source) SearchEntries() ([]*SearchResult, error) { LdapTeamAdd: teamsToAdd, LdapTeamRemove: teamsToRemove, } - if !result[i].IsAdmin { - result[i].IsRestricted = checkRestricted(l, source, v.DN) + + if !user.IsAdmin { + user.IsRestricted = checkRestricted(l, source, v.DN) } + if isAttributeSSHPublicKeySet { - result[i].SSHPublicKey = v.GetAttributeValues(source.AttributeSSHPublicKey) + user.SSHPublicKey = v.GetAttributeValues(source.AttributeSSHPublicKey) } + if isAtributeAvatarSet { - result[i].Avatar = v.GetRawAttributeValue(source.AttributeAvatar) + user.Avatar = v.GetRawAttributeValue(source.AttributeAvatar) } - result[i].LowerName = strings.ToLower(result[i].Username) + + user.LowerName = strings.ToLower(user.Username) + + result = append(result, user) } return result, nil diff --git a/services/automerge/automerge.go b/services/automerge/automerge.go index 15d94e79209eb..74cfb8da8fdf4 100644 --- a/services/automerge/automerge.go +++ b/services/automerge/automerge.go @@ -164,7 
+164,7 @@ func getPullRequestsByHeadSHA(ctx context.Context, sha string, repo *repo_model. func handlePull(pullID int64, sha string) { ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), - fmt.Sprintf("Handle AutoMerge of pull[%d] with sha[%s]", pullID, sha)) + fmt.Sprintf("Handle AutoMerge of PR[%d] with sha[%s]", pullID, sha)) defer finished() pr, err := issues_model.GetPullRequestByID(ctx, pullID) @@ -176,7 +176,7 @@ func handlePull(pullID int64, sha string) { // Check if there is a scheduled pr in the db exists, scheduledPRM, err := pull_model.GetScheduledMergeByPullID(ctx, pr.ID) if err != nil { - log.Error("pull[%d] GetScheduledMergeByPullID: %v", pr.ID, err) + log.Error("%-v GetScheduledMergeByPullID: %v", pr, err) return } if !exists { @@ -188,13 +188,13 @@ func handlePull(pullID int64, sha string) { // did not succeed or was not finished yet. if err = pr.LoadHeadRepo(ctx); err != nil { - log.Error("pull[%d] LoadHeadRepo: %v", pr.ID, err) + log.Error("%-v LoadHeadRepo: %v", pr, err) return } headGitRepo, err := git.OpenRepository(ctx, pr.HeadRepo.RepoPath()) if err != nil { - log.Error("OpenRepository: %v", err) + log.Error("OpenRepository %-v: %v", pr.HeadRepo, err) return } defer headGitRepo.Close() @@ -202,40 +202,40 @@ func handlePull(pullID int64, sha string) { headBranchExist := headGitRepo.IsBranchExist(pr.HeadBranch) if pr.HeadRepo == nil || !headBranchExist { - log.Warn("Head branch of auto merge pr does not exist [HeadRepoID: %d, Branch: %s, PR ID: %d]", pr.HeadRepoID, pr.HeadBranch, pr.ID) + log.Warn("Head branch of auto merge %-v does not exist [HeadRepoID: %d, Branch: %s]", pr, pr.HeadRepoID, pr.HeadBranch) return } // Check if all checks succeeded pass, err := pull_service.IsPullCommitStatusPass(ctx, pr) if err != nil { - log.Error("IsPullCommitStatusPass: %v", err) + log.Error("%-v IsPullCommitStatusPass: %v", pr, err) return } if !pass { - log.Info("Scheduled auto merge pr has unsuccessful status checks [PullID: %d]", pr.ID) + log.Info("Scheduled auto merge %-v has unsuccessful status checks", pr) return } // Merge if all checks succeeded doer, err := user_model.GetUserByID(ctx, scheduledPRM.DoerID) if err != nil { - log.Error("GetUserByIDCtx: %v", err) + log.Error("Unable to get scheduled User[%d]: %v", scheduledPRM.DoerID, err) return } perm, err := access_model.GetUserRepoPermission(ctx, pr.HeadRepo, doer) if err != nil { - log.Error("GetUserRepoPermission: %v", err) + log.Error("GetUserRepoPermission %-v: %v", pr.HeadRepo, err) return } if err := pull_service.CheckPullMergable(ctx, doer, &perm, pr, false, false); err != nil { if errors.Is(pull_service.ErrUserNotAllowedToMerge, err) { - log.Info("PR %d was scheduled to automerge by an unauthorized user", pr.ID) + log.Info("%-v was scheduled to automerge by an unauthorized user", pr) return } - log.Error("pull[%d] CheckPullMergable: %v", pr.ID, err) + log.Error("%-v CheckPullMergable: %v", pr, err) return } @@ -244,13 +244,13 @@ func handlePull(pullID int64, sha string) { baseGitRepo = headGitRepo } else { if err = pr.LoadBaseRepo(ctx); err != nil { - log.Error("LoadBaseRepo: %v", err) + log.Error("%-v LoadBaseRepo: %v", pr, err) return } baseGitRepo, err = git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { - log.Error("OpenRepository: %v", err) + log.Error("OpenRepository %-v: %v", pr.BaseRepo, err) return } defer baseGitRepo.Close() diff --git a/services/cron/tasks_basic.go b/services/cron/tasks_basic.go index 05aef6623d4f7..2e6560ec0c9dd 100644 --- 
a/services/cron/tasks_basic.go +++ b/services/cron/tasks_basic.go @@ -16,7 +16,7 @@ import ( "code.gitea.io/gitea/services/auth" "code.gitea.io/gitea/services/migrations" mirror_service "code.gitea.io/gitea/services/mirror" - packages_service "code.gitea.io/gitea/services/packages" + packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup" repo_service "code.gitea.io/gitea/services/repository" archiver_service "code.gitea.io/gitea/services/repository/archiver" ) @@ -59,11 +59,7 @@ func registerRepoHealthCheck() { }, func(ctx context.Context, _ *user_model.User, config Config) error { rhcConfig := config.(*RepoHealthCheckConfig) // the git args are set by config, they can be safe to be trusted - args := make([]git.CmdArg, 0, len(rhcConfig.Args)) - for _, arg := range rhcConfig.Args { - args = append(args, git.CmdArg(arg)) - } - return repo_service.GitFsckRepos(ctx, rhcConfig.Timeout, args) + return repo_service.GitFsckRepos(ctx, rhcConfig.Timeout, git.ToTrustedCmdArgs(rhcConfig.Args)) }) } @@ -156,7 +152,7 @@ func registerCleanupPackages() { OlderThan: 24 * time.Hour, }, func(ctx context.Context, _ *user_model.User, config Config) error { realConfig := config.(*OlderThanConfig) - return packages_service.Cleanup(ctx, realConfig.OlderThan) + return packages_cleanup_service.Cleanup(ctx, realConfig.OlderThan) }) } diff --git a/services/cron/tasks_extended.go b/services/cron/tasks_extended.go index 520d940edf3c5..3e0dbd132eb31 100644 --- a/services/cron/tasks_extended.go +++ b/services/cron/tasks_extended.go @@ -61,11 +61,7 @@ func registerGarbageCollectRepositories() { }, func(ctx context.Context, _ *user_model.User, config Config) error { rhcConfig := config.(*RepoHealthCheckConfig) // the git args are set by config, they can be safe to be trusted - args := make([]git.CmdArg, 0, len(rhcConfig.Args)) - for _, arg := range rhcConfig.Args { - args = append(args, git.CmdArg(arg)) - } - return repo_service.GitGcRepos(ctx, rhcConfig.Timeout, args...) 
+ return repo_service.GitGcRepos(ctx, rhcConfig.Timeout, git.ToTrustedCmdArgs(rhcConfig.Args)) }) } diff --git a/services/forms/package_form.go b/services/forms/package_form.go index e78e64ef7ed18..558ed54b6559e 100644 --- a/services/forms/package_form.go +++ b/services/forms/package_form.go @@ -15,7 +15,7 @@ import ( type PackageCleanupRuleForm struct { ID int64 Enabled bool - Type string `binding:"Required;In(composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"` + Type string `binding:"Required;In(cargo,composer,conan,conda,container,generic,helm,maven,npm,nuget,pub,pypi,rubygems,vagrant)"` KeepCount int `binding:"In(0,1,5,10,25,50,100)"` KeepPattern string `binding:"RegexPattern"` RemoveDays int `binding:"In(0,7,14,30,60,90,180)"` diff --git a/services/gitdiff/gitdiff.go b/services/gitdiff/gitdiff.go index d3ee93ec945b7..4a74c1a8944e5 100644 --- a/services/gitdiff/gitdiff.go +++ b/services/gitdiff/gitdiff.go @@ -1056,7 +1056,7 @@ type DiffOptions struct { MaxLines int MaxLineCharacters int MaxFiles int - WhitespaceBehavior git.CmdArg + WhitespaceBehavior git.TrustedCmdArgs DirectComparison bool } @@ -1071,38 +1071,22 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff return nil, err } - argsLength := 6 - if len(opts.WhitespaceBehavior) > 0 { - argsLength++ - } - if len(opts.SkipTo) > 0 { - argsLength++ - } - if len(files) > 0 { - argsLength += len(files) + 1 - } - - diffArgs := make([]git.CmdArg, 0, argsLength) + cmdDiff := git.NewCommand(gitRepo.Ctx) if (len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == git.EmptySHA) && commit.ParentCount() == 0 { - diffArgs = append(diffArgs, "diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M") - if len(opts.WhitespaceBehavior) != 0 { - diffArgs = append(diffArgs, opts.WhitespaceBehavior) - } - // append empty tree ref - diffArgs = append(diffArgs, "4b825dc642cb6eb9a060e54bf8d69288fbee4904") - diffArgs = append(diffArgs, git.CmdArgCheck(opts.AfterCommitID)) + cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M"). + AddArguments(opts.WhitespaceBehavior...). + AddArguments("4b825dc642cb6eb9a060e54bf8d69288fbee4904"). // append empty tree ref + AddDynamicArguments(opts.AfterCommitID) } else { actualBeforeCommitID := opts.BeforeCommitID if len(actualBeforeCommitID) == 0 { parentCommit, _ := commit.Parent(0) actualBeforeCommitID = parentCommit.ID.String() } - diffArgs = append(diffArgs, "diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M") - if len(opts.WhitespaceBehavior) != 0 { - diffArgs = append(diffArgs, opts.WhitespaceBehavior) - } - diffArgs = append(diffArgs, git.CmdArgCheck(actualBeforeCommitID)) - diffArgs = append(diffArgs, git.CmdArgCheck(opts.AfterCommitID)) + + cmdDiff.AddArguments("diff", "--src-prefix=\\a/", "--dst-prefix=\\b/", "-M"). + AddArguments(opts.WhitespaceBehavior...). 
+ AddDynamicArguments(actualBeforeCommitID, opts.AfterCommitID) opts.BeforeCommitID = actualBeforeCommitID } @@ -1111,16 +1095,11 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff // the skipping for us parsePatchSkipToFile := opts.SkipTo if opts.SkipTo != "" && git.CheckGitVersionAtLeast("2.31") == nil { - diffArgs = append(diffArgs, git.CmdArg("--skip-to="+opts.SkipTo)) + cmdDiff.AddOptionFormat("--skip-to=%s", opts.SkipTo) parsePatchSkipToFile = "" } - if len(files) > 0 { - diffArgs = append(diffArgs, "--") - for _, file := range files { - diffArgs = append(diffArgs, git.CmdArg(file)) // it's safe to cast it to CmdArg because there is a "--" before - } - } + cmdDiff.AddDashesAndList(files...) reader, writer := io.Pipe() defer func() { @@ -1128,10 +1107,9 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff _ = writer.Close() }() - go func(ctx context.Context, diffArgs []git.CmdArg, repoPath string, writer *io.PipeWriter) { - cmd := git.NewCommand(ctx, diffArgs...) - cmd.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath)) - if err := cmd.Run(&git.RunOpts{ + go func() { + cmdDiff.SetDescription(fmt.Sprintf("GetDiffRange [repo_path: %s]", repoPath)) + if err := cmdDiff.Run(&git.RunOpts{ Timeout: time.Duration(setting.Git.Timeout.Default) * time.Second, Dir: repoPath, Stderr: os.Stderr, @@ -1141,7 +1119,7 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff } _ = writer.Close() - }(gitRepo.Ctx, diffArgs, repoPath, writer) + }() diff, err := ParsePatch(opts.MaxLines, opts.MaxLineCharacters, opts.MaxFiles, reader, parsePatchSkipToFile) if err != nil { @@ -1201,16 +1179,16 @@ func GetDiff(gitRepo *git.Repository, opts *DiffOptions, files ...string) (*Diff separator = ".." } - shortstatArgs := []git.CmdArg{git.CmdArgCheck(opts.BeforeCommitID + separator + opts.AfterCommitID)} + diffPaths := []string{opts.BeforeCommitID + separator + opts.AfterCommitID} if len(opts.BeforeCommitID) == 0 || opts.BeforeCommitID == git.EmptySHA { - shortstatArgs = []git.CmdArg{git.EmptyTreeSHA, git.CmdArgCheck(opts.AfterCommitID)} + diffPaths = []string{git.EmptyTreeSHA, opts.AfterCommitID} } - diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, shortstatArgs...) + diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...) if err != nil && strings.Contains(err.Error(), "no merge base") { // git >= 2.28 now returns an error if base and head have become unrelated. // previously it would return the results of git diff --shortstat base head so let's try that... - shortstatArgs = []git.CmdArg{git.CmdArgCheck(opts.BeforeCommitID), git.CmdArgCheck(opts.AfterCommitID)} - diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, shortstatArgs...) + diffPaths = []string{opts.BeforeCommitID, opts.AfterCommitID} + diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(gitRepo.Ctx, repoPath, nil, diffPaths...) 
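
GetDiff above now assembles the git diff invocation incrementally on a single *git.Command instead of building a []git.CmdArg slice. A condensed sketch of that builder style, with placeholder revisions, skip-to path and file list; the method names follow the calls in this hunk.

```go
package main

import (
	"context"
	"os"

	"code.gitea.io/gitea/modules/git"
)

func main() {
	ctx := context.Background()
	whitespace := git.TrustedCmdArgs{"-w"} // e.g. the result of GetWhitespaceFlag("ignore-all")

	cmd := git.NewCommand(ctx, "diff", "-M").
		AddArguments(whitespace...).           // trusted, fixed flags
		AddDynamicArguments("HEAD~1", "HEAD"). // revisions are dynamic arguments
		AddOptionFormat("--skip-to=%s", "docs/README.md")
	cmd.AddDashesAndList("docs/", "main.go") // appends "--" and the path list

	if err := cmd.Run(&git.RunOpts{Dir: ".", Stdout: os.Stdout, Stderr: os.Stderr}); err != nil {
		panic(err)
	}
}
```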
} if err != nil { return nil, err @@ -1324,17 +1302,17 @@ func CommentMustAsDiff(c *issues_model.Comment) *Diff { } // GetWhitespaceFlag returns git diff flag for treating whitespaces -func GetWhitespaceFlag(whitespaceBehavior string) git.CmdArg { - whitespaceFlags := map[string]string{ - "ignore-all": "-w", - "ignore-change": "-b", - "ignore-eol": "--ignore-space-at-eol", - "show-all": "", +func GetWhitespaceFlag(whitespaceBehavior string) git.TrustedCmdArgs { + whitespaceFlags := map[string]git.TrustedCmdArgs{ + "ignore-all": {"-w"}, + "ignore-change": {"-b"}, + "ignore-eol": {"--ignore-space-at-eol"}, + "show-all": nil, } if flag, ok := whitespaceFlags[whitespaceBehavior]; ok { - return git.CmdArg(flag) + return flag } log.Warn("unknown whitespace behavior: %q, default to 'show-all'", whitespaceBehavior) - return "" + return nil } diff --git a/services/gitdiff/gitdiff_test.go b/services/gitdiff/gitdiff_test.go index 267f0e4cff6ad..eb9ed862e82f4 100644 --- a/services/gitdiff/gitdiff_test.go +++ b/services/gitdiff/gitdiff_test.go @@ -626,7 +626,7 @@ func TestGetDiffRangeWithWhitespaceBehavior(t *testing.T) { return } defer gitRepo.Close() - for _, behavior := range []git.CmdArg{"-w", "--ignore-space-at-eol", "-b", ""} { + for _, behavior := range []git.TrustedCmdArgs{{"-w"}, {"--ignore-space-at-eol"}, {"-b"}, nil} { diffs, err := GetDiff(gitRepo, &DiffOptions{ AfterCommitID: "bd7063cc7c04689c4d082183d32a604ed27a24f9", diff --git a/services/mirror/mirror_pull.go b/services/mirror/mirror_pull.go index 98e8d122a5dfa..7dee90352ea22 100644 --- a/services/mirror/mirror_pull.go +++ b/services/mirror/mirror_pull.go @@ -203,11 +203,11 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo log.Trace("SyncMirrors [repo: %-v]: running git remote update...", m.Repo) - gitArgs := []git.CmdArg{"remote", "update"} + cmd := git.NewCommand(ctx, "remote", "update") if m.EnablePrune { - gitArgs = append(gitArgs, "--prune") + cmd.AddArguments("--prune") } - gitArgs = append(gitArgs, git.CmdArgCheck(m.GetRemoteName())) + cmd.AddDynamicArguments(m.GetRemoteName()) remoteURL, remoteErr := git.GetRemoteURL(ctx, repoPath, m.GetRemoteName()) if remoteErr != nil { @@ -217,7 +217,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo stdoutBuilder := strings.Builder{} stderrBuilder := strings.Builder{} - if err := git.NewCommand(ctx, gitArgs...). + if err := cmd. SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())). Run(&git.RunOpts{ Timeout: timeout, @@ -243,7 +243,7 @@ func runSync(ctx context.Context, m *repo_model.Mirror) ([]*mirrorSyncResult, bo // Successful prune - reattempt mirror stderrBuilder.Reset() stdoutBuilder.Reset() - if err = git.NewCommand(ctx, gitArgs...). + if err = cmd. SetDescription(fmt.Sprintf("Mirror.runSync: %s", m.Repo.FullName())). 
Run(&git.RunOpts{ Timeout: timeout, diff --git a/services/mirror/mirror_push.go b/services/mirror/mirror_push.go index c0c68a3f54161..2c1b00b60c294 100644 --- a/services/mirror/mirror_push.go +++ b/services/mirror/mirror_push.go @@ -37,10 +37,10 @@ func AddPushMirrorRemote(ctx context.Context, m *repo_model.PushMirror, addr str if _, _, err := cmd.RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } - if _, _, err := git.NewCommand(ctx, "config", "--add", git.CmdArg("remote."+m.RemoteName+".push"), "+refs/heads/*:refs/heads/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { + if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/heads/*:refs/heads/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } - if _, _, err := git.NewCommand(ctx, "config", "--add", git.CmdArg("remote."+m.RemoteName+".push"), "+refs/tags/*:refs/tags/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { + if _, _, err := git.NewCommand(ctx, "config", "--add").AddDynamicArguments("remote."+m.RemoteName+".push", "+refs/tags/*:refs/tags/*").RunStdString(&git.RunOpts{Dir: path}); err != nil { return err } return nil diff --git a/services/packages/cargo/index.go b/services/packages/cargo/index.go new file mode 100644 index 0000000000000..e58a47281628b --- /dev/null +++ b/services/packages/cargo/index.go @@ -0,0 +1,290 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. +// SPDX-License-Identifier: MIT + +package cargo + +import ( + "bytes" + "context" + "errors" + "fmt" + "io" + "path" + "strconv" + "time" + + packages_model "code.gitea.io/gitea/models/packages" + repo_model "code.gitea.io/gitea/models/repo" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + cargo_module "code.gitea.io/gitea/modules/packages/cargo" + repo_module "code.gitea.io/gitea/modules/repository" + "code.gitea.io/gitea/modules/setting" + "code.gitea.io/gitea/modules/util" + files_service "code.gitea.io/gitea/services/repository/files" +) + +const ( + IndexRepositoryName = "_cargo-index" + ConfigFileName = "config.json" +) + +// https://doc.rust-lang.org/cargo/reference/registries.html#index-format + +func BuildPackagePath(name string) string { + switch len(name) { + case 0: + panic("Cargo package name can not be empty") + case 1: + return path.Join("1", name) + case 2: + return path.Join("2", name) + case 3: + return path.Join("3", string(name[0]), name) + default: + return path.Join(name[0:2], name[2:4], name) + } +} + +func InitializeIndexRepository(ctx context.Context, doer, owner *user_model.User) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + if err := createOrUpdateConfigFile(ctx, repo, doer, owner); err != nil { + return fmt.Errorf("createOrUpdateConfigFile: %w", err) + } + + return nil +} + +func RebuildIndex(ctx context.Context, doer, owner *user_model.User) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + ps, err := packages_model.GetPackagesByType(ctx, owner.ID, packages_model.TypeCargo) + if err != nil { + return fmt.Errorf("GetPackagesByType: %w", err) + } + + return alterRepositoryContent( + ctx, + doer, + repo, + "Rebuild Cargo Index", + func(t *files_service.TemporaryUploadRepository) error { + // Remove all existing content but the Cargo config + files, err := t.LsFiles() + if err != nil { + return err + } + for i, file := range files { + if 
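
BuildPackagePath above implements the standard Cargo index layout: one- to three-character crate names get dedicated prefixes, and longer names are sharded by their first four characters. The standalone mirror below shows the resulting paths for a few sample names.

```go
package main

import "fmt"

// buildPackagePath mirrors cargo.BuildPackagePath for illustration only.
func buildPackagePath(name string) string {
	switch len(name) {
	case 0:
		panic("empty name")
	case 1:
		return "1/" + name
	case 2:
		return "2/" + name
	case 3:
		return "3/" + string(name[0]) + "/" + name
	default:
		return name[0:2] + "/" + name[2:4] + "/" + name
	}
}

func main() {
	for _, n := range []string{"a", "ab", "gtk", "serde"} {
		fmt.Printf("%-6s -> %s\n", n, buildPackagePath(n))
	}
	// a      -> 1/a
	// ab     -> 2/ab
	// gtk    -> 3/g/gtk
	// serde  -> se/rd/serde
}
```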
file == ConfigFileName { + files[i] = files[len(files)-1] + files = files[:len(files)-1] + break + } + } + if err := t.RemoveFilesFromIndex(files...); err != nil { + return err + } + + // Add all packages + for _, p := range ps { + if err := addOrUpdatePackageIndex(ctx, t, p); err != nil { + return err + } + } + + return nil + }, + ) +} + +func AddOrUpdatePackageIndex(ctx context.Context, doer, owner *user_model.User, packageID int64) error { + repo, err := getOrCreateIndexRepository(ctx, doer, owner) + if err != nil { + return err + } + + p, err := packages_model.GetPackageByID(ctx, packageID) + if err != nil { + return fmt.Errorf("GetPackageByID[%d]: %w", packageID, err) + } + + return alterRepositoryContent( + ctx, + doer, + repo, + "Update "+p.Name, + func(t *files_service.TemporaryUploadRepository) error { + return addOrUpdatePackageIndex(ctx, t, p) + }, + ) +} + +type IndexVersionEntry struct { + Name string `json:"name"` + Version string `json:"vers"` + Dependencies []*cargo_module.Dependency `json:"deps"` + FileChecksum string `json:"cksum"` + Features map[string][]string `json:"features"` + Yanked bool `json:"yanked"` + Links string `json:"links,omitempty"` +} + +func addOrUpdatePackageIndex(ctx context.Context, t *files_service.TemporaryUploadRepository, p *packages_model.Package) error { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + Sort: packages_model.SortVersionAsc, + }) + if err != nil { + return fmt.Errorf("SearchVersions[%s]: %w", p.Name, err) + } + if len(pvs) == 0 { + return nil + } + + pds, err := packages_model.GetPackageDescriptors(ctx, pvs) + if err != nil { + return fmt.Errorf("GetPackageDescriptors[%s]: %w", p.Name, err) + } + + var b bytes.Buffer + for _, pd := range pds { + metadata := pd.Metadata.(*cargo_module.Metadata) + + dependencies := metadata.Dependencies + if dependencies == nil { + dependencies = make([]*cargo_module.Dependency, 0) + } + + features := metadata.Features + if features == nil { + features = make(map[string][]string) + } + + yanked, _ := strconv.ParseBool(pd.VersionProperties.GetByName(cargo_module.PropertyYanked)) + entry, err := json.Marshal(&IndexVersionEntry{ + Name: pd.Package.Name, + Version: pd.Version.Version, + Dependencies: dependencies, + FileChecksum: pd.Files[0].Blob.HashSHA256, + Features: features, + Yanked: yanked, + Links: metadata.Links, + }) + if err != nil { + return err + } + + b.Write(entry) + b.WriteString("\n") + } + + return writeObjectToIndex(t, BuildPackagePath(pds[0].Package.LowerName), &b) +} + +func getOrCreateIndexRepository(ctx context.Context, doer, owner *user_model.User) (*repo_model.Repository, error) { + repo, err := repo_model.GetRepositoryByOwnerAndName(ctx, owner.Name, IndexRepositoryName) + if err != nil { + if errors.Is(err, util.ErrNotExist) { + repo, err = repo_module.CreateRepository(doer, owner, repo_module.CreateRepoOptions{ + Name: IndexRepositoryName, + }) + if err != nil { + return nil, fmt.Errorf("CreateRepository: %w", err) + } + } else { + return nil, fmt.Errorf("GetRepositoryByOwnerAndName: %w", err) + } + } + + return repo, nil +} + +type Config struct { + DownloadURL string `json:"dl"` + APIURL string `json:"api"` +} + +func createOrUpdateConfigFile(ctx context.Context, repo *repo_model.Repository, doer, owner *user_model.User) error { + return alterRepositoryContent( + ctx, + doer, + repo, + "Initialize Cargo Config", + func(t *files_service.TemporaryUploadRepository) error { + var b bytes.Buffer + err := 
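
addOrUpdatePackageIndex above serialises one IndexVersionEntry per version, newline-separated, into the index file at BuildPackagePath(name). The snippet below marshals a local mirror of that struct with made-up values to show what a single index line looks like.

```go
package main

import (
	"encoding/json"
	"fmt"
)

// Local mirror of IndexVersionEntry; dependency details are omitted.
type indexVersionEntry struct {
	Name         string              `json:"name"`
	Version      string              `json:"vers"`
	Dependencies []struct{}          `json:"deps"`
	FileChecksum string              `json:"cksum"`
	Features     map[string][]string `json:"features"`
	Yanked       bool                `json:"yanked"`
	Links        string              `json:"links,omitempty"`
}

func main() {
	line, _ := json.Marshal(indexVersionEntry{
		Name:         "demo-crate",
		Version:      "1.0.0",
		Dependencies: []struct{}{},
		FileChecksum: "e3b0c44298fc1c149afbf4c8996fb92427ae41e4649b934ca495991b7852b855",
		Features:     map[string][]string{},
		Yanked:       false,
	})
	fmt.Println(string(line))
	// {"name":"demo-crate","vers":"1.0.0","deps":[],"cksum":"e3b0...","features":{},"yanked":false}
	// (checksum shortened in this comment)
}
```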
json.NewEncoder(&b).Encode(Config{ + DownloadURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo/api/v1/crates", + APIURL: setting.AppURL + "api/packages/" + owner.Name + "/cargo", + }) + if err != nil { + return err + } + + return writeObjectToIndex(t, ConfigFileName, &b) + }, + ) +} + +// This is a shorter version of CreateOrUpdateRepoFile which allows to perform multiple actions on a git repository +func alterRepositoryContent(ctx context.Context, doer *user_model.User, repo *repo_model.Repository, commitMessage string, fn func(*files_service.TemporaryUploadRepository) error) error { + t, err := files_service.NewTemporaryUploadRepository(ctx, repo) + if err != nil { + return err + } + defer t.Close() + + var lastCommitID string + if err := t.Clone(repo.DefaultBranch); err != nil { + if !git.IsErrBranchNotExist(err) || !repo.IsEmpty { + return err + } + if err := t.Init(); err != nil { + return err + } + } else { + if err := t.SetDefaultIndex(); err != nil { + return err + } + + commit, err := t.GetBranchCommit(repo.DefaultBranch) + if err != nil { + return err + } + + lastCommitID = commit.ID.String() + } + + if err := fn(t); err != nil { + return err + } + + treeHash, err := t.WriteTree() + if err != nil { + return err + } + + now := time.Now() + commitHash, err := t.CommitTreeWithDate(lastCommitID, doer, doer, treeHash, commitMessage, false, now, now) + if err != nil { + return err + } + + return t.Push(doer, commitHash, repo.DefaultBranch) +} + +func writeObjectToIndex(t *files_service.TemporaryUploadRepository, path string, r io.Reader) error { + hash, err := t.HashObject(r) + if err != nil { + return err + } + + return t.AddObjectToIndex("100644", hash, path) +} diff --git a/services/packages/cleanup/cleanup.go b/services/packages/cleanup/cleanup.go new file mode 100644 index 0000000000000..2d62a028a4c6d --- /dev/null +++ b/services/packages/cleanup/cleanup.go @@ -0,0 +1,154 @@ +// Copyright 2022 The Gitea Authors. All rights reserved. 
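
createOrUpdateConfigFile above writes a config.json at the index root so a Cargo client can discover the download and API endpoints. Roughly what it would emit for an instance at https://gitea.example.com and an owner named some-owner (both placeholders):

```go
package main

import (
	"encoding/json"
	"fmt"
)

func main() {
	cfg := struct {
		DownloadURL string `json:"dl"`
		APIURL      string `json:"api"`
	}{
		DownloadURL: "https://gitea.example.com/api/packages/some-owner/cargo/api/v1/crates",
		APIURL:      "https://gitea.example.com/api/packages/some-owner/cargo",
	}
	out, _ := json.MarshalIndent(cfg, "", "  ")
	fmt.Println(string(out))
	// {
	//   "dl": "https://gitea.example.com/api/packages/some-owner/cargo/api/v1/crates",
	//   "api": "https://gitea.example.com/api/packages/some-owner/cargo"
	// }
}
```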
+// SPDX-License-Identifier: MIT + +package container + +import ( + "context" + "fmt" + "time" + + "code.gitea.io/gitea/models/db" + packages_model "code.gitea.io/gitea/models/packages" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/log" + packages_module "code.gitea.io/gitea/modules/packages" + "code.gitea.io/gitea/modules/util" + packages_service "code.gitea.io/gitea/services/packages" + cargo_service "code.gitea.io/gitea/services/packages/cargo" + container_service "code.gitea.io/gitea/services/packages/container" +) + +// Cleanup removes expired package data +func Cleanup(taskCtx context.Context, olderThan time.Duration) error { + ctx, committer, err := db.TxContext(taskCtx) + if err != nil { + return err + } + defer committer.Close() + + err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { + select { + case <-taskCtx.Done(): + return db.ErrCancelledf("While processing package cleanup rules") + default: + } + + if err := pcr.CompiledPattern(); err != nil { + return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) + } + + olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) + + packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) + } + + for _, p := range packages { + pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ + PackageID: p.ID, + IsInternal: util.OptionalBoolFalse, + Sort: packages_model.SortCreatedDesc, + Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200), + }) + if err != nil { + return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) + } + versionDeleted := false + for _, pv := range pvs { + if pcr.Type == packages_model.TypeContainer { + if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { + return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) + } else if skip { + log.Debug("Rule[%d]: keep '%s/%s' (container)", pcr.ID, p.Name, pv.Version) + continue + } + } + + toMatch := pv.LowerVersion + if pcr.MatchFullName { + toMatch = p.LowerName + "/" + pv.LowerVersion + } + + if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) + continue + } + if pv.CreatedUnix.AsLocalTime().After(olderThan) { + log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) + continue + } + if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { + log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) + continue + } + + log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) + + if err := packages_service.DeletePackageVersionAndReferences(ctx, pv); err != nil { + return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err) + } + + versionDeleted = true + } + + if versionDeleted { + if pcr.Type == packages_model.TypeCargo { + owner, err := user_model.GetUserByID(ctx, pcr.OwnerID) + if err != nil { + return fmt.Errorf("GetUserByID failed: %w", err) + } + if err := cargo_service.AddOrUpdatePackageIndex(ctx, owner, owner, p.ID); err != nil { + return fmt.Errorf("CleanupRule [%d]: cargo.AddOrUpdatePackageIndex failed: %w", pcr.ID, err) + } + } + } + } + return nil + }) + if err != nil { + return err + } 
+ + if err := container_service.Cleanup(ctx, olderThan); err != nil { + return err + } + + ps, err := packages_model.FindUnreferencedPackages(ctx) + if err != nil { + return err + } + for _, p := range ps { + if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { + return err + } + if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil { + return err + } + } + + pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) + if err != nil { + return err + } + + for _, pb := range pbs { + if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { + return err + } + } + + if err := committer.Commit(); err != nil { + return err + } + + contentStore := packages_module.NewContentStore() + for _, pb := range pbs { + if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { + log.Error("Error deleting package blob [%v]: %v", pb.ID, err) + } + } + + return nil +} diff --git a/services/packages/packages.go b/services/packages/packages.go index 9e52cb1450962..f5028407552c0 100644 --- a/services/packages/packages.go +++ b/services/packages/packages.go @@ -10,7 +10,6 @@ import ( "fmt" "io" "strings" - "time" "code.gitea.io/gitea/models/db" packages_model "code.gitea.io/gitea/models/packages" @@ -22,7 +21,6 @@ import ( packages_module "code.gitea.io/gitea/modules/packages" "code.gitea.io/gitea/modules/setting" "code.gitea.io/gitea/modules/util" - container_service "code.gitea.io/gitea/services/packages/container" ) var ( @@ -335,6 +333,8 @@ func CheckSizeQuotaExceeded(ctx context.Context, doer, owner *user_model.User, p var typeSpecificSize int64 switch packageType { + case packages_model.TypeCargo: + typeSpecificSize = setting.Packages.LimitSizeCargo case packages_model.TypeComposer: typeSpecificSize = setting.Packages.LimitSizeComposer case packages_model.TypeConan: @@ -448,123 +448,6 @@ func DeletePackageFile(ctx context.Context, pf *packages_model.PackageFile) erro return packages_model.DeleteFileByID(ctx, pf.ID) } -// Cleanup removes expired package data -func Cleanup(taskCtx context.Context, olderThan time.Duration) error { - ctx, committer, err := db.TxContext(taskCtx) - if err != nil { - return err - } - defer committer.Close() - - err = packages_model.IterateEnabledCleanupRules(ctx, func(ctx context.Context, pcr *packages_model.PackageCleanupRule) error { - select { - case <-taskCtx.Done(): - return db.ErrCancelledf("While processing package cleanup rules") - default: - } - - if err := pcr.CompiledPattern(); err != nil { - return fmt.Errorf("CleanupRule [%d]: CompilePattern failed: %w", pcr.ID, err) - } - - olderThan := time.Now().AddDate(0, 0, -pcr.RemoveDays) - - packages, err := packages_model.GetPackagesByType(ctx, pcr.OwnerID, pcr.Type) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: GetPackagesByType failed: %w", pcr.ID, err) - } - - for _, p := range packages { - pvs, _, err := packages_model.SearchVersions(ctx, &packages_model.PackageSearchOptions{ - PackageID: p.ID, - IsInternal: util.OptionalBoolFalse, - Sort: packages_model.SortCreatedDesc, - Paginator: db.NewAbsoluteListOptions(pcr.KeepCount, 200), - }) - if err != nil { - return fmt.Errorf("CleanupRule [%d]: SearchVersions failed: %w", pcr.ID, err) - } - for _, pv := range pvs { - if skip, err := container_service.ShouldBeSkipped(ctx, pcr, p, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: container.ShouldBeSkipped failed: %w", pcr.ID, err) - } else if skip { - log.Debug("Rule[%d]: keep '%s/%s' 
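
The cleanup loop moved into services/packages/cleanup above applies, per version, the keep-count window (via the paginator offset), an optional container-specific skip, then the keep pattern, the remove-days cutoff and the remove pattern, in that order. The sketch below restates only the last three checks with illustrative values; it uses plain types, not Gitea's models.

```go
package main

import (
	"fmt"
	"regexp"
	"time"
)

type rule struct {
	keepPattern   *regexp.Regexp // keep versions matching this
	removeDays    int            // keep versions newer than this many days
	removePattern *regexp.Regexp // only remove versions matching this
}

func shouldRemove(r rule, version string, created time.Time) bool {
	if r.keepPattern != nil && r.keepPattern.MatchString(version) {
		return false // keep pattern wins first
	}
	if created.After(time.Now().AddDate(0, 0, -r.removeDays)) {
		return false // still inside the remove-days window
	}
	if r.removePattern != nil && !r.removePattern.MatchString(version) {
		return false // remove pattern restricts what may be deleted
	}
	return true
}

func main() {
	r := rule{
		keepPattern:   regexp.MustCompile(`^\d+\.\d+\.\d+$`), // keep plain releases
		removeDays:    30,
		removePattern: regexp.MustCompile(`-rc`), // only delete release candidates
	}
	fmt.Println(shouldRemove(r, "1.2.0-rc1", time.Now().AddDate(0, 0, -60))) // true
	fmt.Println(shouldRemove(r, "1.2.0", time.Now().AddDate(0, 0, -60)))     // false (keep pattern)
}
```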
(container)", pcr.ID, p.Name, pv.Version) - continue - } - - toMatch := pv.LowerVersion - if pcr.MatchFullName { - toMatch = p.LowerName + "/" + pv.LowerVersion - } - - if pcr.KeepPatternMatcher != nil && pcr.KeepPatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (keep pattern)", pcr.ID, p.Name, pv.Version) - continue - } - if pv.CreatedUnix.AsLocalTime().After(olderThan) { - log.Debug("Rule[%d]: keep '%s/%s' (remove days)", pcr.ID, p.Name, pv.Version) - continue - } - if pcr.RemovePatternMatcher != nil && !pcr.RemovePatternMatcher.MatchString(toMatch) { - log.Debug("Rule[%d]: keep '%s/%s' (remove pattern)", pcr.ID, p.Name, pv.Version) - continue - } - - log.Debug("Rule[%d]: remove '%s/%s'", pcr.ID, p.Name, pv.Version) - - if err := DeletePackageVersionAndReferences(ctx, pv); err != nil { - return fmt.Errorf("CleanupRule [%d]: DeletePackageVersionAndReferences failed: %w", pcr.ID, err) - } - } - } - return nil - }) - if err != nil { - return err - } - - if err := container_service.Cleanup(ctx, olderThan); err != nil { - return err - } - - ps, err := packages_model.FindUnreferencedPackages(ctx) - if err != nil { - return err - } - for _, p := range ps { - if err := packages_model.DeleteAllProperties(ctx, packages_model.PropertyTypePackage, p.ID); err != nil { - return err - } - if err := packages_model.DeletePackageByID(ctx, p.ID); err != nil { - return err - } - } - - pbs, err := packages_model.FindExpiredUnreferencedBlobs(ctx, olderThan) - if err != nil { - return err - } - - for _, pb := range pbs { - if err := packages_model.DeleteBlobByID(ctx, pb.ID); err != nil { - return err - } - } - - if err := committer.Commit(); err != nil { - return err - } - - contentStore := packages_module.NewContentStore() - for _, pb := range pbs { - if err := contentStore.Delete(packages_module.BlobHash256Key(pb.HashSHA256)); err != nil { - log.Error("Error deleting package blob [%v]: %v", pb.ID, err) - } - } - - return nil -} - // GetFileStreamByPackageNameAndVersion returns the content of the specific package file func GetFileStreamByPackageNameAndVersion(ctx context.Context, pvi *PackageInfo, pfi *PackageFileInfo) (io.ReadSeekCloser, *packages_model.PackageFile, error) { log.Trace("Getting package file stream: %v, %v, %s, %s, %s, %s", pvi.Owner.ID, pvi.PackageType, pvi.Name, pvi.Version, pfi.Filename, pfi.CompositeKey) diff --git a/services/pull/check.go b/services/pull/check.go index db86378909c49..481491c73bb09 100644 --- a/services/pull/check.go +++ b/services/pull/check.go @@ -8,7 +8,6 @@ import ( "context" "errors" "fmt" - "os" "strconv" "strings" @@ -27,7 +26,6 @@ import ( "code.gitea.io/gitea/modules/process" "code.gitea.io/gitea/modules/queue" "code.gitea.io/gitea/modules/timeutil" - "code.gitea.io/gitea/modules/util" asymkey_service "code.gitea.io/gitea/services/asymkey" ) @@ -50,14 +48,14 @@ func AddToTaskQueue(pr *issues_model.PullRequest) { pr.Status = issues_model.PullRequestStatusChecking err := pr.UpdateColsIfNotMerged(db.DefaultContext, "status") if err != nil { - log.Error("AddToTaskQueue.UpdateCols[%d].(add to queue): %v", pr.ID, err) + log.Error("AddToTaskQueue(%-v).UpdateCols.(add to queue): %v", pr, err) } else { - log.Trace("Adding PR ID: %d to the test pull requests queue", pr.ID) + log.Trace("Adding %-v to the test pull requests queue", pr) } return err }) if err != nil && err != queue.ErrAlreadyInQueue { - log.Error("Error adding prID %d to the test pull requests queue: %v", pr.ID, err) + log.Error("Error adding %-v to the test pull requests queue: %v", 
pr, err) } } @@ -69,12 +67,14 @@ func CheckPullMergable(stdCtx context.Context, doer *user_model.User, perm *acce } if err := pr.LoadIssue(ctx); err != nil { + log.Error("Unable to load issue[%d] for %-v: %v", pr.IssueID, pr, err) return err } else if pr.Issue.IsClosed { return ErrIsClosed } if allowedMerge, err := IsUserAllowedToMerge(ctx, pr, *perm, doer); err != nil { + log.Error("Error whilst checking if %-v is allowed to merge %-v: %v", doer, pr, err) return err } else if !allowedMerge { return ErrUserNotAllowedToMerge @@ -98,15 +98,19 @@ func CheckPullMergable(stdCtx context.Context, doer *user_model.User, perm *acce } if err := CheckPullBranchProtections(ctx, pr, false); err != nil { - if models.IsErrDisallowedToMerge(err) { - if force { - if isRepoAdmin, err2 := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); err2 != nil { - return err2 - } else if !isRepoAdmin { - return err - } - } - } else { + if !models.IsErrDisallowedToMerge(err) { + log.Error("Error whilst checking pull branch protection for %-v: %v", pr, err) + return err + } + + if !force { + return err + } + + if isRepoAdmin, err2 := access_model.IsUserRepoAdmin(ctx, pr.BaseRepo, doer); err2 != nil { + log.Error("Unable to check if %-v is a repo admin in %-v: %v", doer, pr.BaseRepo, err2) + return err2 + } else if !isRepoAdmin { return err } } @@ -144,7 +148,7 @@ func isSignedIfRequired(ctx context.Context, pr *issues_model.PullRequest, doer // checkAndUpdateStatus checks if pull request is possible to leaving checking status, // and set to be either conflict or mergeable. func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) { - // Status is not changed to conflict means mergeable. + // If status has not been changed to conflict by testPatch then we are mergeable if pr.Status == issues_model.PullRequestStatusChecking { pr.Status = issues_model.PullRequestStatusMergeable } @@ -152,79 +156,69 @@ func checkAndUpdateStatus(ctx context.Context, pr *issues_model.PullRequest) { // Make sure there is no waiting test to process before leaving the checking status. has, err := prPatchCheckerQueue.Has(strconv.FormatInt(pr.ID, 10)) if err != nil { - log.Error("Unable to check if the queue is waiting to reprocess pr.ID %d. Error: %v", pr.ID, err) + log.Error("Unable to check if the queue is waiting to reprocess %-v. 
Error: %v", pr, err) } - if !has { - if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil { - log.Error("Update[%d]: %v", pr.ID, err) - } + if has { + log.Trace("Not updating status for %-v as it is due to be rechecked", pr) + return + } + + if err := pr.UpdateColsIfNotMerged(ctx, "merge_base", "status", "conflicted_files", "changed_protected_files"); err != nil { + log.Error("Update[%-v]: %v", pr, err) } } -// getMergeCommit checks if a pull request got merged +// getMergeCommit checks if a pull request has been merged // Returns the git.Commit of the pull request if merged func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Commit, error) { - if pr.BaseRepo == nil { - var err error - pr.BaseRepo, err = repo_model.GetRepositoryByID(ctx, pr.BaseRepoID) - if err != nil { - return nil, fmt.Errorf("GetRepositoryByID: %w", err) - } - } - - indexTmpPath, err := os.MkdirTemp(os.TempDir(), "gitea-"+pr.BaseRepo.Name) - if err != nil { - return nil, fmt.Errorf("Failed to create temp dir for repository %s: %w", pr.BaseRepo.RepoPath(), err) + if err := pr.LoadBaseRepo(ctx); err != nil { + return nil, fmt.Errorf("unable to load base repo for %s: %w", pr, err) } - defer func() { - if err := util.RemoveAll(indexTmpPath); err != nil { - log.Warn("Unable to remove temporary index path: %s: Error: %v", indexTmpPath, err) - } - }() - headFile := pr.GetGitRefName() + prHeadRef := pr.GetGitRefName() - // Check if a pull request is merged into BaseBranch - _, _, err = git.NewCommand(ctx, "merge-base", "--is-ancestor").AddDynamicArguments(headFile, pr.BaseBranch). - RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath(), Env: []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}}) - if err != nil { - // Errors are signaled by a non-zero status that is not 1 + // Check if the pull request is merged into BaseBranch + if _, _, err := git.NewCommand(ctx, "merge-base", "--is-ancestor"). + AddDynamicArguments(prHeadRef, pr.BaseBranch). + RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}); err != nil { if strings.Contains(err.Error(), "exit status 1") { + // prHeadRef is not an ancestor of the base branch return nil, nil } - return nil, fmt.Errorf("git merge-base --is-ancestor: %w", err) + // Errors are signaled by a non-zero status that is not 1 + return nil, fmt.Errorf("%-v git merge-base --is-ancestor: %w", pr, err) } - commitIDBytes, err := os.ReadFile(pr.BaseRepo.RepoPath() + "/" + headFile) + // If merge-base successfully exits then prHeadRef is an ancestor of pr.BaseBranch + + // Find the head commit id + prHeadCommitID, err := git.GetFullCommitID(ctx, pr.BaseRepo.RepoPath(), prHeadRef) if err != nil { - return nil, fmt.Errorf("ReadFile(%s): %w", headFile, err) + return nil, fmt.Errorf("GetFullCommitID(%s) in %s: %w", prHeadRef, pr.BaseRepo.FullName(), err) } - commitID := string(commitIDBytes) - if len(commitID) < git.SHAFullLength { - return nil, fmt.Errorf(`ReadFile(%s): invalid commit-ID "%s"`, headFile, commitID) - } - cmd := commitID[:git.SHAFullLength] + ".." + pr.BaseBranch // Get the commit from BaseBranch where the pull request got merged - mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse").AddDynamicArguments(cmd). 
- RunStdString(&git.RunOpts{Dir: "", Env: []string{"GIT_INDEX_FILE=" + indexTmpPath, "GIT_DIR=" + pr.BaseRepo.RepoPath()}}) + mergeCommit, _, err := git.NewCommand(ctx, "rev-list", "--ancestry-path", "--merges", "--reverse"). + AddDynamicArguments(prHeadCommitID + ".." + pr.BaseBranch). + RunStdString(&git.RunOpts{Dir: pr.BaseRepo.RepoPath()}) if err != nil { return nil, fmt.Errorf("git rev-list --ancestry-path --merges --reverse: %w", err) } else if len(mergeCommit) < git.SHAFullLength { // PR was maybe fast-forwarded, so just use last commit of PR - mergeCommit = commitID[:git.SHAFullLength] + mergeCommit = prHeadCommitID } + mergeCommit = strings.TrimSpace(mergeCommit) gitRepo, err := git.OpenRepository(ctx, pr.BaseRepo.RepoPath()) if err != nil { - return nil, fmt.Errorf("OpenRepository: %w", err) + return nil, fmt.Errorf("%-v OpenRepository: %w", pr.BaseRepo, err) } defer gitRepo.Close() - commit, err := gitRepo.GetCommit(mergeCommit[:git.SHAFullLength]) + commit, err := gitRepo.GetCommit(mergeCommit) if err != nil { - return nil, fmt.Errorf("GetMergeCommit[%v]: %w", mergeCommit[:git.SHAFullLength], err) + return nil, fmt.Errorf("GetMergeCommit[%s]: %w", mergeCommit, err) } return commit, nil @@ -234,7 +228,7 @@ func getMergeCommit(ctx context.Context, pr *issues_model.PullRequest) (*git.Com // When a pull request got manually merged mark the pull request as merged func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool { if err := pr.LoadBaseRepo(ctx); err != nil { - log.Error("PullRequest[%d].LoadBaseRepo: %v", pr.ID, err) + log.Error("%-v LoadBaseRepo: %v", pr, err) return false } @@ -244,47 +238,50 @@ func manuallyMerged(ctx context.Context, pr *issues_model.PullRequest) bool { return false } } else { - log.Error("PullRequest[%d].BaseRepo.GetUnit(unit.TypePullRequests): %v", pr.ID, err) + log.Error("%-v BaseRepo.GetUnit(unit.TypePullRequests): %v", pr, err) return false } commit, err := getMergeCommit(ctx, pr) if err != nil { - log.Error("PullRequest[%d].getMergeCommit: %v", pr.ID, err) + log.Error("%-v getMergeCommit: %v", pr, err) + return false + } + + if commit == nil { + // no merge commit found return false } - if commit != nil { - pr.MergedCommitID = commit.ID.String() - pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix()) - pr.Status = issues_model.PullRequestStatusManuallyMerged - merger, _ := user_model.GetUserByEmail(commit.Author.Email) - - // When the commit author is unknown set the BaseRepo owner as merger - if merger == nil { - if pr.BaseRepo.Owner == nil { - if err = pr.BaseRepo.GetOwner(ctx); err != nil { - log.Error("BaseRepo.GetOwner[%d]: %v", pr.ID, err) - return false - } + + pr.MergedCommitID = commit.ID.String() + pr.MergedUnix = timeutil.TimeStamp(commit.Author.When.Unix()) + pr.Status = issues_model.PullRequestStatusManuallyMerged + merger, _ := user_model.GetUserByEmail(commit.Author.Email) + + // When the commit author is unknown set the BaseRepo owner as merger + if merger == nil { + if pr.BaseRepo.Owner == nil { + if err = pr.BaseRepo.GetOwner(ctx); err != nil { + log.Error("%-v BaseRepo.GetOwner: %v", pr, err) + return false } - merger = pr.BaseRepo.Owner } - pr.Merger = merger - pr.MergerID = merger.ID + merger = pr.BaseRepo.Owner + } + pr.Merger = merger + pr.MergerID = merger.ID - if merged, err := pr.SetMerged(ctx); err != nil { - log.Error("PullRequest[%d].setMerged : %v", pr.ID, err) - return false - } else if !merged { - return false - } + if merged, err := pr.SetMerged(ctx); err != nil { + log.Error("%-v setMerged 
: %v", pr, err) + return false + } else if !merged { + return false + } - notification.NotifyMergePullRequest(ctx, merger, pr) + notification.NotifyMergePullRequest(ctx, merger, pr) - log.Info("manuallyMerged[%d]: Marked as manually merged into %s/%s by commit id: %s", pr.ID, pr.BaseRepo.Name, pr.BaseBranch, commit.ID.String()) - return true - } - return false + log.Info("manuallyMerged[%-v]: Marked as manually merged into %s/%s by commit id: %s", pr, pr.BaseRepo.Name, pr.BaseBranch, commit.ID.String()) + return true } // InitializePullRequests checks and tests untested patches of pull requests. @@ -300,10 +297,10 @@ func InitializePullRequests(ctx context.Context) { return default: if err := prPatchCheckerQueue.PushFunc(strconv.FormatInt(prID, 10), func() error { - log.Trace("Adding PR ID: %d to the pull requests patch checking queue", prID) + log.Trace("Adding PR[%d] to the pull requests patch checking queue", prID) return nil }); err != nil { - log.Error("Error adding prID: %s to the pull requests patch checking queue %v", prID, err) + log.Error("Error adding PR[%d] to the pull requests patch checking queue %v", prID, err) } } } @@ -327,23 +324,30 @@ func testPR(id int64) { pr, err := issues_model.GetPullRequestByID(ctx, id) if err != nil { - log.Error("GetPullRequestByID[%d]: %v", id, err) + log.Error("Unable to GetPullRequestByID[%d] for testPR: %v", id, err) return } + log.Trace("Testing %-v", pr) + defer func() { + log.Trace("Done testing %-v (status: %s)", pr, pr.Status) + }() + if pr.HasMerged { + log.Trace("%-v is already merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID) return } if manuallyMerged(ctx, pr) { + log.Trace("%-v is manually merged (status: %s, merge commit: %s)", pr, pr.Status, pr.MergedCommitID) return } if err := TestPatch(pr); err != nil { - log.Error("testPatch[%d]: %v", pr.ID, err) + log.Error("testPatch[%-v]: %v", pr, err) pr.Status = issues_model.PullRequestStatusError if err := pr.UpdateCols("status"); err != nil { - log.Error("update pr [%d] status to PullRequestStatusError failed: %v", pr.ID, err) + log.Error("update pr [%-v] status to PullRequestStatusError failed: %v", pr, err) } return } diff --git a/services/pull/merge.go b/services/pull/merge.go index 7ffbdb78b0207..edd5b601da934 100644 --- a/services/pull/merge.go +++ b/services/pull/merge.go @@ -370,16 +370,16 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode sig := doer.NewGitSig() committer := sig - // Determine if we should sign - var signArg git.CmdArg - sign, keyID, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, tmpBasePath, "HEAD", trackingBranch) + // Determine if we should sign. 
If no signKeyID, use --no-gpg-sign to countermand the sign config (from gitconfig) + var signArgs git.TrustedCmdArgs + sign, signKeyID, signer, _ := asymkey_service.SignMerge(ctx, pr, doer, tmpBasePath, "HEAD", trackingBranch) if sign { - signArg = git.CmdArg("-S" + keyID) if pr.BaseRepo.GetTrustModel() == repo_model.CommitterTrustModel || pr.BaseRepo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { committer = signer } + signArgs = git.ToTrustedCmdArgs([]string{"-S" + signKeyID}) } else { - signArg = git.CmdArg("--no-gpg-sign") + signArgs = append(signArgs, "--no-gpg-sign") } commitTimeStr := time.Now().Format(time.RFC3339) @@ -403,7 +403,7 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode return "", err } - if err := commitAndSignNoAuthor(ctx, pr, message, signArg, tmpBasePath, env); err != nil { + if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil { log.Error("Unable to make final commit: %v", err) return "", err } @@ -505,7 +505,7 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode return "", err } if mergeStyle == repo_model.MergeStyleRebaseMerge { - if err := commitAndSignNoAuthor(ctx, pr, message, signArg, tmpBasePath, env); err != nil { + if err := commitAndSignNoAuthor(ctx, pr, message, signArgs, tmpBasePath, env); err != nil { log.Error("Unable to make final commit: %v", err) return "", err } @@ -523,35 +523,22 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode return "", fmt.Errorf("LoadPoster: %w", err) } sig := pr.Issue.Poster.NewGitSig() - if signArg == "" { - if err := git.NewCommand(ctx, "commit", git.CmdArg(fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email)), "-m").AddDynamicArguments(message). - Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - } else { - if setting.Repository.PullRequest.AddCoCommitterTrailers && committer.String() != sig.String() { - // add trailer - message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String()) - } - if err := git.NewCommand(ctx, "commit"). - AddArguments(signArg). - AddArguments(git.CmdArg(fmt.Sprintf("--author='%s <%s>'", sig.Name, sig.Email))). - AddArguments("-m").AddDynamicArguments(message). - Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } + if setting.Repository.PullRequest.AddCoCommitterTrailers && committer.String() != sig.String() { + // add trailer + message += fmt.Sprintf("\nCo-authored-by: %s\nCo-committed-by: %s\n", sig.String(), sig.String()) + } + if err := git.NewCommand(ctx, "commit"). + AddArguments(signArgs...). + AddOptionFormat("--author='%s <%s>'", sig.Name, sig.Email). 
+ AddOptionValues("-m", message). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + return "", fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) } outbuf.Reset() errbuf.Reset() @@ -649,30 +636,17 @@ func rawMerge(ctx context.Context, pr *issues_model.PullRequest, doer *user_mode return mergeCommitID, nil } -func commitAndSignNoAuthor(ctx context.Context, pr *issues_model.PullRequest, message string, signArg git.CmdArg, tmpBasePath string, env []string) error { +func commitAndSignNoAuthor(ctx context.Context, pr *issues_model.PullRequest, message string, signArgs git.TrustedCmdArgs, tmpBasePath string, env []string) error { var outbuf, errbuf strings.Builder - if signArg == "" { - if err := git.NewCommand(ctx, "commit", "-m").AddDynamicArguments(message). - Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } - } else { - if err := git.NewCommand(ctx, "commit").AddArguments(signArg).AddArguments("-m").AddDynamicArguments(message). - Run(&git.RunOpts{ - Env: env, - Dir: tmpBasePath, - Stdout: &outbuf, - Stderr: &errbuf, - }); err != nil { - log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) - } + if err := git.NewCommand(ctx, "commit").AddArguments(signArgs...).AddOptionValues("-m", message). + Run(&git.RunOpts{ + Env: env, + Dir: tmpBasePath, + Stdout: &outbuf, + Stderr: &errbuf, + }); err != nil { + log.Error("git commit [%s:%s -> %s:%s]: %v\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) + return fmt.Errorf("git commit [%s:%s -> %s:%s]: %w\n%s\n%s", pr.HeadRepo.FullName(), pr.HeadBranch, pr.BaseRepo.FullName(), pr.BaseBranch, err, outbuf.String(), errbuf.String()) } return nil } diff --git a/services/pull/patch.go b/services/pull/patch.go index 26a72a7371bf1..c2ccc75bdccdd 100644 --- a/services/pull/patch.go +++ b/services/pull/patch.go @@ -60,7 +60,7 @@ var patchErrorSuffices = []string{ // TestPatch will test whether a simple patch will apply func TestPatch(pr *issues_model.PullRequest) error { - ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: Repo[%d]#%d", pr.BaseRepoID, pr.Index)) + ctx, _, finished := process.GetManager().AddContext(graceful.GetManager().HammerContext(), fmt.Sprintf("TestPatch: %s", pr)) defer finished() // Clone base repo. @@ -376,16 +376,16 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * prConfig := prUnit.PullRequestsConfig() // 6. 
Prepare the arguments to apply the patch against the index - args := []git.CmdArg{"apply", "--check", "--cached"} + cmdApply := git.NewCommand(gitRepo.Ctx, "apply", "--check", "--cached") if prConfig.IgnoreWhitespaceConflicts { - args = append(args, "--ignore-whitespace") + cmdApply.AddArguments("--ignore-whitespace") } is3way := false if git.CheckGitVersionAtLeast("2.32.0") == nil { - args = append(args, "--3way") + cmdApply.AddArguments("--3way") is3way = true } - args = append(args, git.CmdArgCheck(patchPath)) + cmdApply.AddDynamicArguments(patchPath) // 7. Prep the pipe: // - Here we could do the equivalent of: @@ -407,71 +407,70 @@ func checkConflicts(ctx context.Context, pr *issues_model.PullRequest, gitRepo * // 8. Run the check command conflict = false - err = git.NewCommand(gitRepo.Ctx, args...). - Run(&git.RunOpts{ - Dir: tmpBasePath, - Stderr: stderrWriter, - PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { - // Close the writer end of the pipe to begin processing - _ = stderrWriter.Close() - defer func() { - // Close the reader on return to terminate the git command if necessary - _ = stderrReader.Close() - }() - - const prefix = "error: patch failed:" - const errorPrefix = "error: " - const threewayFailed = "Failed to perform three-way merge..." - const appliedPatchPrefix = "Applied patch to '" - const withConflicts = "' with conflicts." - - conflicts := make(container.Set[string]) - - // Now scan the output from the command - scanner := bufio.NewScanner(stderrReader) - for scanner.Scan() { - line := scanner.Text() - log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line) - if strings.HasPrefix(line, prefix) { - conflict = true - filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0]) - conflicts.Add(filepath) - } else if is3way && line == threewayFailed { - conflict = true - } else if strings.HasPrefix(line, errorPrefix) { - conflict = true - for _, suffix := range patchErrorSuffices { - if strings.HasSuffix(line, suffix) { - filepath := strings.TrimSpace(strings.TrimSuffix(line[len(errorPrefix):], suffix)) - if filepath != "" { - conflicts.Add(filepath) - } - break + err = cmdApply.Run(&git.RunOpts{ + Dir: tmpBasePath, + Stderr: stderrWriter, + PipelineFunc: func(ctx context.Context, cancel context.CancelFunc) error { + // Close the writer end of the pipe to begin processing + _ = stderrWriter.Close() + defer func() { + // Close the reader on return to terminate the git command if necessary + _ = stderrReader.Close() + }() + + const prefix = "error: patch failed:" + const errorPrefix = "error: " + const threewayFailed = "Failed to perform three-way merge..." + const appliedPatchPrefix = "Applied patch to '" + const withConflicts = "' with conflicts." 
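+ // The scanner below classifies `git apply` stderr output line by line:
+ //   "error: patch failed: <file>:<hunk>"              -> conflict, <file> recorded
+ //   "Failed to perform three-way merge..."            -> conflict (3-way mode only)
+ //   "error: ..." ending in a known patch error suffix -> conflict, <file> extracted from the message
+ //   "Applied patch to '<file>' with conflicts."       -> conflict, <file> recorded (3-way mode only)
+ // Collection stops once 10 conflicted files have been found.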
+ + conflicts := make(container.Set[string]) + + // Now scan the output from the command + scanner := bufio.NewScanner(stderrReader) + for scanner.Scan() { + line := scanner.Text() + log.Trace("PullRequest[%d].testPatch: stderr: %s", pr.ID, line) + if strings.HasPrefix(line, prefix) { + conflict = true + filepath := strings.TrimSpace(strings.Split(line[len(prefix):], ":")[0]) + conflicts.Add(filepath) + } else if is3way && line == threewayFailed { + conflict = true + } else if strings.HasPrefix(line, errorPrefix) { + conflict = true + for _, suffix := range patchErrorSuffices { + if strings.HasSuffix(line, suffix) { + filepath := strings.TrimSpace(strings.TrimSuffix(line[len(errorPrefix):], suffix)) + if filepath != "" { + conflicts.Add(filepath) } - } - } else if is3way && strings.HasPrefix(line, appliedPatchPrefix) && strings.HasSuffix(line, withConflicts) { - conflict = true - filepath := strings.TrimPrefix(strings.TrimSuffix(line, withConflicts), appliedPatchPrefix) - if filepath != "" { - conflicts.Add(filepath) + break } } - // only list 10 conflicted files - if len(conflicts) >= 10 { - break + } else if is3way && strings.HasPrefix(line, appliedPatchPrefix) && strings.HasSuffix(line, withConflicts) { + conflict = true + filepath := strings.TrimPrefix(strings.TrimSuffix(line, withConflicts), appliedPatchPrefix) + if filepath != "" { + conflicts.Add(filepath) } } + // only list 10 conflicted files + if len(conflicts) >= 10 { + break + } + } - if len(conflicts) > 0 { - pr.ConflictedFiles = make([]string, 0, len(conflicts)) - for key := range conflicts { - pr.ConflictedFiles = append(pr.ConflictedFiles, key) - } + if len(conflicts) > 0 { + pr.ConflictedFiles = make([]string, 0, len(conflicts)) + for key := range conflicts { + pr.ConflictedFiles = append(pr.ConflictedFiles, key) } + } - return nil - }, - }) + return nil + }, + }) // 9. Check if the found conflictedfiles is non-zero, "err" could be non-nil, so we should ignore it if we found conflicts. // Note: `"err" could be non-nil` is due that if enable 3-way merge, it doesn't return any error on found conflicts. diff --git a/services/repository/adopt.go b/services/repository/adopt.go index 8ebf2b6a3e60f..280c4cc035a8f 100644 --- a/services/repository/adopt.go +++ b/services/repository/adopt.go @@ -67,7 +67,7 @@ func AdoptRepository(doer, u *user_model.User, opts repo_module.CreateRepoOption } } - if err := repo_module.CreateRepositoryByExample(ctx, doer, u, repo, true); err != nil { + if err := repo_module.CreateRepositoryByExample(ctx, doer, u, repo, true, false); err != nil { return err } if err := adoptRepository(ctx, repoPath, doer, repo, opts); err != nil { diff --git a/services/repository/check.go b/services/repository/check.go index e9d65aea4a8ce..3a1f0b7f306bd 100644 --- a/services/repository/check.go +++ b/services/repository/check.go @@ -23,7 +23,7 @@ import ( ) // GitFsckRepos calls 'git fsck' to check repository health. -func GitFsckRepos(ctx context.Context, timeout time.Duration, args []git.CmdArg) error { +func GitFsckRepos(ctx context.Context, timeout time.Duration, args git.TrustedCmdArgs) error { log.Trace("Doing: GitFsck") if err := db.Iterate( @@ -47,10 +47,10 @@ func GitFsckRepos(ctx context.Context, timeout time.Duration, args []git.CmdArg) } // GitFsckRepo calls 'git fsck' to check an individual repository's health. 
-func GitFsckRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args []git.CmdArg) error { +func GitFsckRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args git.TrustedCmdArgs) error { log.Trace("Running health check on repository %-v", repo) repoPath := repo.RepoPath() - if err := git.Fsck(ctx, repoPath, timeout, args...); err != nil { + if err := git.Fsck(ctx, repoPath, timeout, args); err != nil { log.Warn("Failed to health check repository (%-v): %v", repo, err) if err = system_model.CreateRepositoryNotice("Failed to health check repository (%s): %v", repo.FullName(), err); err != nil { log.Error("CreateRepositoryNotice: %v", err) @@ -60,9 +60,8 @@ func GitFsckRepo(ctx context.Context, repo *repo_model.Repository, timeout time. } // GitGcRepos calls 'git gc' to remove unnecessary files and optimize the local repository -func GitGcRepos(ctx context.Context, timeout time.Duration, args ...git.CmdArg) error { +func GitGcRepos(ctx context.Context, timeout time.Duration, args git.TrustedCmdArgs) error { log.Trace("Doing: GitGcRepos") - args = append([]git.CmdArg{"gc"}, args...) if err := db.Iterate( ctx, @@ -86,9 +85,9 @@ func GitGcRepos(ctx context.Context, timeout time.Duration, args ...git.CmdArg) } // GitGcRepo calls 'git gc' to remove unnecessary files and optimize the local repository -func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args []git.CmdArg) error { +func GitGcRepo(ctx context.Context, repo *repo_model.Repository, timeout time.Duration, args git.TrustedCmdArgs) error { log.Trace("Running git gc on %-v", repo) - command := git.NewCommand(ctx, args...). + command := git.NewCommand(ctx, "gc").AddArguments(args...). SetDescription(fmt.Sprintf("Repository Garbage Collection: %s", repo.FullName())) var stdout string var err error diff --git a/services/repository/files/patch.go b/services/repository/files/patch.go index 73ee0fa815e4c..f65199cfcb94e 100644 --- a/services/repository/files/patch.go +++ b/services/repository/files/patch.go @@ -141,14 +141,12 @@ func ApplyDiffPatch(ctx context.Context, repo *repo_model.Repository, doer *user stdout := &strings.Builder{} stderr := &strings.Builder{} - args := []git.CmdArg{"apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary"} - + cmdApply := git.NewCommand(ctx, "apply", "--index", "--recount", "--cached", "--ignore-whitespace", "--whitespace=fix", "--binary") if git.CheckGitVersionAtLeast("2.32") == nil { - args = append(args, "-3") + cmdApply.AddArguments("-3") } - cmd := git.NewCommand(ctx, args...) 
- if err := cmd.Run(&git.RunOpts{ + if err := cmdApply.Run(&git.RunOpts{ Dir: t.basePath, Stdout: stdout, Stderr: stderr, diff --git a/services/repository/files/temp_repo.go b/services/repository/files/temp_repo.go index 1f3375cdcc83c..a086d15a4f99e 100644 --- a/services/repository/files/temp_repo.go +++ b/services/repository/files/temp_repo.go @@ -233,11 +233,9 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(parent string, author, co _, _ = messageBytes.WriteString(message) _, _ = messageBytes.WriteString("\n") - var args []git.CmdArg + cmdCommitTree := git.NewCommand(t.ctx, "commit-tree").AddDynamicArguments(treeHash) if parent != "" { - args = []git.CmdArg{"commit-tree", git.CmdArgCheck(treeHash), "-p", git.CmdArgCheck(parent)} - } else { - args = []git.CmdArg{"commit-tree", git.CmdArgCheck(treeHash)} + cmdCommitTree.AddOptionValues("-p", parent) } var sign bool @@ -249,7 +247,7 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(parent string, author, co sign, keyID, signer, _ = asymkey_service.SignInitialCommit(t.ctx, t.repo.RepoPath(), author) } if sign { - args = append(args, git.CmdArg("-S"+keyID)) + cmdCommitTree.AddOptionFormat("-S%s", keyID) if t.repo.GetTrustModel() == repo_model.CommitterTrustModel || t.repo.GetTrustModel() == repo_model.CollaboratorCommitterTrustModel { if committerSig.Name != authorSig.Name || committerSig.Email != authorSig.Email { // Add trailers @@ -264,7 +262,7 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(parent string, author, co committerSig = signer } } else { - args = append(args, "--no-gpg-sign") + cmdCommitTree.AddArguments("--no-gpg-sign") } if signoff { @@ -281,7 +279,7 @@ func (t *TemporaryUploadRepository) CommitTreeWithDate(parent string, author, co stdout := new(bytes.Buffer) stderr := new(bytes.Buffer) - if err := git.NewCommand(t.ctx, args...). + if err := cmdCommitTree. 
Run(&git.RunOpts{ Env: env, Dir: t.basePath, @@ -364,7 +362,7 @@ func (t *TemporaryUploadRepository) DiffIndex() (*gitdiff.Diff, error) { t.repo.FullName(), err, stderr) } - diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(t.ctx, t.basePath, "--cached", "HEAD") + diff.NumFiles, diff.TotalAddition, diff.TotalDeletion, err = git.GetDiffShortStat(t.ctx, t.basePath, git.TrustedCmdArgs{"--cached"}, "HEAD") if err != nil { return nil, err } diff --git a/services/repository/files/update.go b/services/repository/files/update.go index 58b7a5e082b58..45a469239665f 100644 --- a/services/repository/files/update.go +++ b/services/repository/files/update.go @@ -370,7 +370,7 @@ func CreateOrUpdateRepoFile(ctx context.Context, repo *repo_model.Repository, do if setting.LFS.StartServer && hasOldBranch { // Check there is no way this can return multiple infos filename2attribute2info, err := t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []git.CmdArg{"filter"}, + Attributes: []string{"filter"}, Filenames: []string{treePath}, CachedOnly: true, }) diff --git a/services/repository/files/upload.go b/services/repository/files/upload.go index e7289dd60d23b..cf2f7019b68c1 100644 --- a/services/repository/files/upload.go +++ b/services/repository/files/upload.go @@ -96,7 +96,7 @@ func UploadRepoFiles(ctx context.Context, repo *repo_model.Repository, doer *use var filename2attribute2info map[string]map[string]string if setting.LFS.StartServer { filename2attribute2info, err = t.gitRepo.CheckAttribute(git.CheckAttributeOpts{ - Attributes: []git.CmdArg{"filter"}, + Attributes: []string{"filter"}, Filenames: names, CachedOnly: true, }) diff --git a/services/repository/fork.go b/services/repository/fork.go index ad534be887f1b..c3ca89e02e7f8 100644 --- a/services/repository/fork.go +++ b/services/repository/fork.go @@ -119,7 +119,7 @@ func ForkRepository(ctx context.Context, doer, owner *user_model.User, opts Fork }() err = db.WithTx(ctx, func(txCtx context.Context) error { - if err = repo_module.CreateRepositoryByExample(txCtx, doer, owner, repo, false); err != nil { + if err = repo_module.CreateRepositoryByExample(txCtx, doer, owner, repo, false, true); err != nil { return err } diff --git a/templates/base/footer.tmpl b/templates/base/footer.tmpl index 4d3e08a5977e7..e3cac806a4d91 100644 --- a/templates/base/footer.tmpl +++ b/templates/base/footer.tmpl @@ -16,10 +16,13 @@ {{if .EnableCaptcha}} {{if eq .CaptchaType "recaptcha"}} - + {{end}} {{if eq .CaptchaType "hcaptcha"}} - + + {{end}} + {{if eq .CaptchaType "cfturnstile"}} + {{end}} {{end}} diff --git a/templates/org/settings/packages.tmpl b/templates/org/settings/packages.tmpl index 412a9813c21b4..04e5d45b50705 100644 --- a/templates/org/settings/packages.tmpl +++ b/templates/org/settings/packages.tmpl @@ -7,6 +7,7 @@
{{template "base/alert" .}} {{template "package/shared/cleanup_rules/list" .}} + {{template "package/shared/cargo" .}}
diff --git a/templates/package/content/cargo.tmpl b/templates/package/content/cargo.tmpl new file mode 100644 index 0000000000000..54c40a5b0dc0a --- /dev/null +++ b/templates/package/content/cargo.tmpl @@ -0,0 +1,62 @@ +{{if eq .PackageDescriptor.Package.Type "cargo"}} +

{{.locale.Tr "packages.installation"}}

+
+
+
+ +
[registry]
+default = "gitea"
+
+[registries.gitea]
+index = "{{AppUrl}}{{.PackageDescriptor.Owner.Name}}/_cargo-index.git"
+
+[net]
+git-fetch-with-cli = true
+
+
+ +
cargo add {{.PackageDescriptor.Package.Name}}@{{.PackageDescriptor.Version.Version}}
+
+
+ +
+
+
+ + {{if or .PackageDescriptor.Metadata.Description .PackageDescriptor.Metadata.Readme}} +

{{.locale.Tr "packages.about"}}

+ {{if .PackageDescriptor.Metadata.Description}}
{{.PackageDescriptor.Metadata.Description}}
{{end}} + {{if .PackageDescriptor.Metadata.Readme}}
{{RenderMarkdownToHtml .PackageDescriptor.Metadata.Readme}}
{{end}} + {{end}} + + {{if .PackageDescriptor.Metadata.Dependencies}} +

{{.locale.Tr "packages.dependencies"}}

+
+ + + + + + + + + {{range .PackageDescriptor.Metadata.Dependencies}} + + + + + {{end}} + +
{{.locale.Tr "packages.dependency.id"}}{{.locale.Tr "packages.dependency.version"}}
{{.Name}}{{.Req}}
+
+ {{end}} + + {{if .PackageDescriptor.Metadata.Keywords}} +

{{.locale.Tr "packages.keywords"}}

+
+ {{range .PackageDescriptor.Metadata.Keywords}} + {{.}} + {{end}} +
+ {{end}} +{{end}} diff --git a/templates/package/metadata/cargo.tmpl b/templates/package/metadata/cargo.tmpl new file mode 100644 index 0000000000000..68232d48321c5 --- /dev/null +++ b/templates/package/metadata/cargo.tmpl @@ -0,0 +1,7 @@ +{{if eq .PackageDescriptor.Package.Type "cargo"}} + {{range .PackageDescriptor.Metadata.Authors}}
{{svg "octicon-person" 16 "mr-3"}} {{.}}
{{end}} + {{if .PackageDescriptor.Metadata.ProjectURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.locale.Tr "packages.details.project_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.RepositoryURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.locale.Tr "packages.cargo.details.repository_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.DocumentationURL}}
{{svg "octicon-link-external" 16 "mr-3"}} {{.locale.Tr "packages.cargo.details.documentation_site"}}
{{end}} + {{if .PackageDescriptor.Metadata.License}}
{{svg "octicon-law" 16 "mr-3"}} {{.PackageDescriptor.Metadata.License}}
{{end}} +{{end}} diff --git a/templates/package/shared/cargo.tmpl b/templates/package/shared/cargo.tmpl new file mode 100644 index 0000000000000..b9c2ef4d2ba43 --- /dev/null +++ b/templates/package/shared/cargo.tmpl @@ -0,0 +1,24 @@ +

+ {{.locale.Tr "packages.owner.settings.cargo.title"}} +

+
+
+
+ +
+
+ {{.CsrfTokenHtml}} + +
+
+ +
+
+ {{.CsrfTokenHtml}} + +
+
+ +
+
+
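The settings panel added in templates/package/shared/cargo.tmpl above exposes two owner-level actions for the Cargo index. Going by the service functions exercised in the integration test near the end of this diff, they map to cargo_service.InitializeIndexRepository (create the _cargo-index repository) and cargo_service.RebuildIndex (regenerate its content from the stored packages). The sketch below calls them directly; the wrapper function and the exact signatures are assumptions based on that test.

package example // hypothetical; assumes the call shapes used in the integration test below

import (
	"context"

	user_model "code.gitea.io/gitea/models/user"
	cargo_service "code.gitea.io/gitea/services/packages/cargo"
)

// resetCargoIndex sketches the two actions offered by the settings panel:
// create the owner's _cargo-index repository, then rebuild its content.
func resetCargoIndex(ctx context.Context, doer, owner *user_model.User) error {
	if err := cargo_service.InitializeIndexRepository(ctx, doer, owner); err != nil {
		return err // likely fails if the index repository already exists (assumption)
	}
	return cargo_service.RebuildIndex(ctx, doer, owner)
}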
diff --git a/templates/package/view.tmpl b/templates/package/view.tmpl index 4611cdb8d840f..6209e6cb02e27 100644 --- a/templates/package/view.tmpl +++ b/templates/package/view.tmpl @@ -19,6 +19,7 @@
+ {{template "package/content/cargo" .}} {{template "package/content/composer" .}} {{template "package/content/conan" .}} {{template "package/content/conda" .}} @@ -43,6 +44,7 @@ {{end}}
{{svg "octicon-calendar" 16 "mr-3"}} {{TimeSinceUnix .PackageDescriptor.Version.CreatedUnix $.locale}}
{{svg "octicon-download" 16 "mr-3"}} {{.PackageDescriptor.Version.DownloadCount}}
+ {{template "package/metadata/cargo" .}} {{template "package/metadata/composer" .}} {{template "package/metadata/conan" .}} {{template "package/metadata/conda" .}} diff --git a/templates/repo/diff/box.tmpl b/templates/repo/diff/box.tmpl index f74714499ae54..798a7eae14d1b 100644 --- a/templates/repo/diff/box.tmpl +++ b/templates/repo/diff/box.tmpl @@ -76,19 +76,18 @@ {{$isImage := or (call $.IsBlobAnImage $blobBase) (call $.IsBlobAnImage $blobHead)}} {{$isCsv := (call $.IsCsvFile $file)}} {{$showFileViewToggle := or $isImage (and (not $file.IsIncomplete) $isCsv)}} -
+ {{$isExpandable := or (gt $file.Addition 0) (gt $file.Deletion 0) $file.IsBin}} +

- {{if or (gt $file.Addition 0) (gt $file.Deletion 0) $file.IsBin}} - - {{if $file.ShouldBeHidden}} - {{svg "octicon-chevron-right" 18}} - {{else}} - {{svg "octicon-chevron-down" 18}} - {{end}} - - {{end}} -
+ + {{if $file.ShouldBeHidden}} + {{svg "octicon-chevron-right" 18}} + {{else}} + {{svg "octicon-chevron-down" 18}} + {{end}} + + - - + {{template "repo/issue/view_content/update_branch_by_merge" (dict "locale" .locale "Issue" .Issue "UpdateAllowed" .UpdateAllowed "UpdateByRebaseAllowed" .UpdateByRebaseAllowed "Link" .Link)}} {{else if .Issue.PullRequest.IsChecking}}
{{svg "octicon-sync"}} @@ -282,44 +283,7 @@
{{end}} {{end}} - {{if and (gt .Issue.PullRequest.CommitsBehind 0) (not .Issue.IsClosed) (not .Issue.PullRequest.IsChecking) (not .IsPullFilesConflicted) (not .IsPullRequestBroken) (not $canAutoMerge)}} -
-
-
- {{svg "octicon-alert"}} - {{$.locale.Tr "repo.pulls.outdated_with_base_branch"}} -
-
- {{if and .UpdateAllowed .UpdateByRebaseAllowed}} -
-
- - - -
-
- {{end}} - {{if and .UpdateAllowed (not .UpdateByRebaseAllowed)}} -
- {{.CsrfTokenHtml}} - -
- {{end}} -
-
- {{end}} + {{template "repo/issue/view_content/update_branch_by_merge" (dict "locale" .locale "Issue" .Issue "UpdateAllowed" .UpdateAllowed "UpdateByRebaseAllowed" .UpdateByRebaseAllowed "Link" .Link)}} {{if .Issue.PullRequest.IsEmpty}}
diff --git a/templates/repo/issue/view_content/update_branch_by_merge.tmpl b/templates/repo/issue/view_content/update_branch_by_merge.tmpl new file mode 100644 index 0000000000000..3bc8dcca9752e --- /dev/null +++ b/templates/repo/issue/view_content/update_branch_by_merge.tmpl @@ -0,0 +1,39 @@ +{{$canAutoMerge := false}} +{{if and (gt .Issue.PullRequest.CommitsBehind 0) (not .Issue.IsClosed) (not .Issue.PullRequest.IsChecking) (not .IsPullFilesConflicted) (not .IsPullRequestBroken) (not $canAutoMerge)}} +
+
+
+ {{svg "octicon-alert"}} + {{$.locale.Tr "repo.pulls.outdated_with_base_branch"}} +
+
+ {{if and .UpdateAllowed .UpdateByRebaseAllowed}} +
+
+ + + +
+
+ {{end}} + {{if and .UpdateAllowed (not .UpdateByRebaseAllowed)}} +
+ {{.CsrfTokenHtml}} + +
+ {{end}} +
+
+{{end}} diff --git a/templates/swagger/v1_json.tmpl b/templates/swagger/v1_json.tmpl index 5a3f2c2bb9e98..d6e3009b35b0d 100644 --- a/templates/swagger/v1_json.tmpl +++ b/templates/swagger/v1_json.tmpl @@ -2100,6 +2100,7 @@ }, { "enum": [ + "cargo", "composer", "conan", "conda", diff --git a/templates/user/auth/captcha.tmpl b/templates/user/auth/captcha.tmpl index 87b22a0720eda..a794c8f543ea8 100644 --- a/templates/user/auth/captcha.tmpl +++ b/templates/user/auth/captcha.tmpl @@ -9,16 +9,20 @@
{{else if eq .CaptchaType "recaptcha"}}
-
+
{{else if eq .CaptchaType "hcaptcha"}}
-
+
{{else if eq .CaptchaType "mcaptcha"}}
{{.locale.Tr "captcha"}}
-
+
+
+{{else if eq .CaptchaType "cfturnstile"}} +
+
{{end}}{{end}} diff --git a/templates/user/dashboard/navbar.tmpl b/templates/user/dashboard/navbar.tmpl index 4b20bf52680f2..ab3d5029d145d 100644 --- a/templates/user/dashboard/navbar.tmpl +++ b/templates/user/dashboard/navbar.tmpl @@ -7,8 +7,8 @@ {{.ContextUser.ShortName 40}} {{if .ContextUser.IsOrganization}} - {{if .ContextUser.Visibility.IsLimited}}
{{.locale.Tr "org.settings.visibility.limited_shortname"}}
{{end}} - {{if .ContextUser.Visibility.IsPrivate}}
{{.locale.Tr "org.settings.visibility.private_shortname"}}
{{end}} + {{if .ContextUser.Visibility.IsLimited}}
{{.locale.Tr "org.settings.visibility.limited_shortname"}}
{{end}} + {{if .ContextUser.Visibility.IsPrivate}}
{{.locale.Tr "org.settings.visibility.private_shortname"}}
{{end}}
{{end}} {{svg "octicon-triangle-down" 14 "dropdown icon"}} @@ -27,8 +27,8 @@ {{avatar .}} {{.ShortName 40}} - {{if .Visibility.IsLimited}}
{{$.locale.Tr "org.settings.visibility.limited_shortname"}}
{{end}} - {{if .Visibility.IsPrivate}}
{{$.locale.Tr "org.settings.visibility.private_shortname"}}
{{end}} + {{if .Visibility.IsLimited}}
{{$.locale.Tr "org.settings.visibility.limited_shortname"}}
{{end}} + {{if .Visibility.IsPrivate}}
{{$.locale.Tr "org.settings.visibility.private_shortname"}}
{{end}}
{{end}} diff --git a/templates/user/settings/packages.tmpl b/templates/user/settings/packages.tmpl index 9aae14f934326..472d84327a729 100644 --- a/templates/user/settings/packages.tmpl +++ b/templates/user/settings/packages.tmpl @@ -4,6 +4,7 @@
{{template "base/alert" .}} {{template "package/shared/cleanup_rules/list" .}} + {{template "package/shared/cargo" .}}
{{template "base/footer" .}} diff --git a/tests/integration/api_packages_cargo_test.go b/tests/integration/api_packages_cargo_test.go new file mode 100644 index 0000000000000..0c542eaf1e857 --- /dev/null +++ b/tests/integration/api_packages_cargo_test.go @@ -0,0 +1,341 @@ +// Copyright 2021 The Gitea Authors. All rights reserved. +// Use of this source code is governed by a MIT-style +// license that can be found in the LICENSE file. + +package integration + +import ( + "bytes" + "encoding/binary" + "fmt" + "io" + "net/http" + neturl "net/url" + "testing" + + "code.gitea.io/gitea/models/db" + "code.gitea.io/gitea/models/packages" + repo_model "code.gitea.io/gitea/models/repo" + "code.gitea.io/gitea/models/unittest" + user_model "code.gitea.io/gitea/models/user" + "code.gitea.io/gitea/modules/git" + "code.gitea.io/gitea/modules/json" + cargo_module "code.gitea.io/gitea/modules/packages/cargo" + "code.gitea.io/gitea/modules/setting" + cargo_router "code.gitea.io/gitea/routers/api/packages/cargo" + cargo_service "code.gitea.io/gitea/services/packages/cargo" + "code.gitea.io/gitea/tests" + + "github.com/stretchr/testify/assert" +) + +func TestPackageCargo(t *testing.T) { + onGiteaRun(t, testPackageCargo) +} + +func testPackageCargo(t *testing.T, _ *neturl.URL) { + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ID: 2}) + + packageName := "cargo-package" + packageVersion := "1.0.3" + packageDescription := "Package Description" + packageAuthor := "KN4CK3R" + packageHomepage := "https://gitea.io/" + packageLicense := "MIT" + + createPackage := func(name, version string) io.Reader { + metadata := `{ + "name":"` + name + `", + "vers":"` + version + `", + "description":"` + packageDescription + `", + "authors": ["` + packageAuthor + `"], + "deps":[ + { + "name":"dep", + "version_req":"1.0", + "registry": "https://gitea.io/user/_cargo-index", + "kind": "normal", + "default_features": true + } + ], + "homepage":"` + packageHomepage + `", + "license":"` + packageLicense + `" +}` + + var buf bytes.Buffer + binary.Write(&buf, binary.LittleEndian, uint32(len(metadata))) + buf.WriteString(metadata) + binary.Write(&buf, binary.LittleEndian, uint32(4)) + buf.WriteString("test") + return &buf + } + + err := cargo_service.InitializeIndexRepository(db.DefaultContext, user, user) + assert.NoError(t, err) + + repo, err := repo_model.GetRepositoryByOwnerAndName(db.DefaultContext, user.Name, cargo_service.IndexRepositoryName) + assert.NotNil(t, repo) + assert.NoError(t, err) + + readGitContent := func(t *testing.T, path string) string { + gitRepo, err := git.OpenRepository(db.DefaultContext, repo.RepoPath()) + assert.NoError(t, err) + defer gitRepo.Close() + + commit, err := gitRepo.GetBranchCommit(repo.DefaultBranch) + assert.NoError(t, err) + + blob, err := commit.GetBlobByPath(path) + assert.NoError(t, err) + + content, err := blob.GetBlobContent() + assert.NoError(t, err) + + return content + } + + root := fmt.Sprintf("%sapi/packages/%s/cargo", setting.AppURL, user.Name) + url := fmt.Sprintf("%s/api/v1/crates", root) + + t.Run("Index", func(t *testing.T) { + t.Run("Config", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + content := readGitContent(t, cargo_service.ConfigFileName) + + var config cargo_service.Config + err := json.Unmarshal([]byte(content), &config) + assert.NoError(t, err) + + assert.Equal(t, url, config.DownloadURL) + assert.Equal(t, root, config.APIURL) + }) + }) + + t.Run("Upload", func(t *testing.T) { + t.Run("InvalidNameOrVersion", func(t *testing.T) { + defer 
tests.PrintCurrentTest(t)() + + content := createPackage("0test", "1.0.0") + + req := NewRequestWithBody(t, "PUT", url+"/new", content) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusBadRequest) + + var status cargo_router.StatusResponse + DecodeJSON(t, resp, &status) + assert.False(t, status.OK) + + content = createPackage("test", "-1.0.0") + + req = NewRequestWithBody(t, "PUT", url+"/new", content) + req = AddBasicAuthHeader(req, user.Name) + resp = MakeRequest(t, req, http.StatusBadRequest) + + DecodeJSON(t, resp, &status) + assert.False(t, status.OK) + }) + + t.Run("InvalidContent", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + metadata := `{"name":"test","vers":"1.0.0"}` + + var buf bytes.Buffer + binary.Write(&buf, binary.LittleEndian, uint32(len(metadata))) + buf.WriteString(metadata) + binary.Write(&buf, binary.LittleEndian, uint32(4)) + buf.WriteString("te") + + req := NewRequestWithBody(t, "PUT", url+"/new", &buf) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusBadRequest) + }) + + t.Run("Valid", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + req := NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion)) + MakeRequest(t, req, http.StatusUnauthorized) + + req = NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var status cargo_router.StatusResponse + DecodeJSON(t, resp, &status) + assert.True(t, status.OK) + + pvs, err := packages.GetVersionsByPackageType(db.DefaultContext, user.ID, packages.TypeCargo) + assert.NoError(t, err) + assert.Len(t, pvs, 1) + + pd, err := packages.GetPackageDescriptor(db.DefaultContext, pvs[0]) + assert.NoError(t, err) + assert.NotNil(t, pd.SemVer) + assert.IsType(t, &cargo_module.Metadata{}, pd.Metadata) + assert.Equal(t, packageName, pd.Package.Name) + assert.Equal(t, packageVersion, pd.Version.Version) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pvs[0].ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + assert.Equal(t, fmt.Sprintf("%s-%s.crate", packageName, packageVersion), pfs[0].Name) + assert.True(t, pfs[0].IsLead) + + pb, err := packages.GetBlobByID(db.DefaultContext, pfs[0].BlobID) + assert.NoError(t, err) + assert.EqualValues(t, 4, pb.Size) + + req = NewRequestWithBody(t, "PUT", url+"/new", createPackage(packageName, packageVersion)) + req = AddBasicAuthHeader(req, user.Name) + MakeRequest(t, req, http.StatusConflict) + + t.Run("Index", func(t *testing.T) { + t.Run("Entry", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + content := readGitContent(t, cargo_service.BuildPackagePath(packageName)) + + var entry cargo_service.IndexVersionEntry + err := json.Unmarshal([]byte(content), &entry) + assert.NoError(t, err) + + assert.Equal(t, packageName, entry.Name) + assert.Equal(t, packageVersion, entry.Version) + assert.Equal(t, pb.HashSHA256, entry.FileChecksum) + assert.False(t, entry.Yanked) + assert.Len(t, entry.Dependencies, 1) + dep := entry.Dependencies[0] + assert.Equal(t, "dep", dep.Name) + assert.Equal(t, "1.0", dep.Req) + assert.Equal(t, "normal", dep.Kind) + assert.True(t, dep.DefaultFeatures) + assert.Empty(t, dep.Features) + assert.False(t, dep.Optional) + assert.Nil(t, dep.Target) + assert.NotNil(t, dep.Registry) + assert.Equal(t, "https://gitea.io/user/_cargo-index", *dep.Registry) + assert.Nil(t, dep.Package) + }) + + t.Run("Rebuild", func(t *testing.T) { 
+ defer tests.PrintCurrentTest(t)() + + err := cargo_service.RebuildIndex(db.DefaultContext, user, user) + assert.NoError(t, err) + + _ = readGitContent(t, cargo_service.BuildPackagePath(packageName)) + }) + }) + }) + }) + + t.Run("Download", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + pv, err := packages.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages.TypeCargo, packageName, packageVersion) + assert.NoError(t, err) + assert.EqualValues(t, 0, pv.DownloadCount) + + pfs, err := packages.GetFilesByVersionID(db.DefaultContext, pv.ID) + assert.NoError(t, err) + assert.Len(t, pfs, 1) + + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/%s/download", url, neturl.PathEscape(packageName), neturl.PathEscape(pv.Version))) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + assert.Equal(t, "test", resp.Body.String()) + + pv, err = packages.GetVersionByNameAndVersion(db.DefaultContext, user.ID, packages.TypeCargo, packageName, packageVersion) + assert.NoError(t, err) + assert.EqualValues(t, 1, pv.DownloadCount) + }) + + t.Run("Search", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + cases := []struct { + Query string + Page int + PerPage int + ExpectedTotal int64 + ExpectedResults int + }{ + {"", 0, 0, 1, 1}, + {"", 1, 10, 1, 1}, + {"cargo", 1, 0, 1, 1}, + {"cargo", 1, 10, 1, 1}, + {"cargo", 2, 10, 1, 0}, + {"test", 0, 10, 0, 0}, + } + + for i, c := range cases { + req := NewRequest(t, "GET", fmt.Sprintf("%s?q=%s&page=%d&per_page=%d", url, c.Query, c.Page, c.PerPage)) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var result cargo_router.SearchResult + DecodeJSON(t, resp, &result) + + assert.Equal(t, c.ExpectedTotal, result.Meta.Total, "case %d: unexpected total hits", i) + assert.Len(t, result.Crates, c.ExpectedResults, "case %d: unexpected result count", i) + } + }) + + t.Run("Yank", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + req := NewRequest(t, "DELETE", fmt.Sprintf("%s/%s/%s/yank", url, neturl.PathEscape(packageName), neturl.PathEscape(packageVersion))) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var status cargo_router.StatusResponse + DecodeJSON(t, resp, &status) + assert.True(t, status.OK) + + content := readGitContent(t, cargo_service.BuildPackagePath(packageName)) + + var entry cargo_service.IndexVersionEntry + err := json.Unmarshal([]byte(content), &entry) + assert.NoError(t, err) + + assert.True(t, entry.Yanked) + }) + + t.Run("Unyank", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + req := NewRequest(t, "PUT", fmt.Sprintf("%s/%s/%s/unyank", url, neturl.PathEscape(packageName), neturl.PathEscape(packageVersion))) + req = AddBasicAuthHeader(req, user.Name) + resp := MakeRequest(t, req, http.StatusOK) + + var status cargo_router.StatusResponse + DecodeJSON(t, resp, &status) + assert.True(t, status.OK) + + content := readGitContent(t, cargo_service.BuildPackagePath(packageName)) + + var entry cargo_service.IndexVersionEntry + err := json.Unmarshal([]byte(content), &entry) + assert.NoError(t, err) + + assert.False(t, entry.Yanked) + }) + + t.Run("ListOwners", func(t *testing.T) { + defer tests.PrintCurrentTest(t)() + + req := NewRequest(t, "GET", fmt.Sprintf("%s/%s/owners", url, neturl.PathEscape(packageName))) + resp := MakeRequest(t, req, http.StatusOK) + + var owners cargo_router.Owners + DecodeJSON(t, resp, &owners) + + assert.Len(t, owners.Users, 1) + assert.Equal(t, user.ID, 
owners.Users[0].ID) + assert.Equal(t, user.Name, owners.Users[0].Login) + assert.Equal(t, user.DisplayName(), owners.Users[0].Name) + }) +} diff --git a/tests/integration/api_packages_test.go b/tests/integration/api_packages_test.go index a6c335eb7e0ed..39852e212c387 100644 --- a/tests/integration/api_packages_test.go +++ b/tests/integration/api_packages_test.go @@ -21,6 +21,7 @@ import ( "code.gitea.io/gitea/modules/setting" api "code.gitea.io/gitea/modules/structs" packages_service "code.gitea.io/gitea/services/packages" + packages_cleanup_service "code.gitea.io/gitea/services/packages/cleanup" "code.gitea.io/gitea/tests" "github.com/stretchr/testify/assert" @@ -246,7 +247,7 @@ func TestPackageCleanup(t *testing.T) { _, err = packages_model.GetInternalVersionByNameAndVersion(db.DefaultContext, 2, packages_model.TypeContainer, "test", container_model.UploadVersion) assert.NoError(t, err) - err = packages_service.Cleanup(db.DefaultContext, duration) + err = packages_cleanup_service.Cleanup(db.DefaultContext, duration) assert.NoError(t, err) pbs, err = packages_model.FindExpiredUnreferencedBlobs(db.DefaultContext, duration) @@ -383,7 +384,7 @@ func TestPackageCleanup(t *testing.T) { pcr, err := packages_model.InsertCleanupRule(db.DefaultContext, c.Rule) assert.NoError(t, err) - err = packages_service.Cleanup(db.DefaultContext, duration) + err = packages_cleanup_service.Cleanup(db.DefaultContext, duration) assert.NoError(t, err) for _, v := range c.Versions { diff --git a/tests/integration/auth_ldap_test.go b/tests/integration/auth_ldap_test.go index 88571ecd614e4..9dfa4c73d83ea 100644 --- a/tests/integration/auth_ldap_test.go +++ b/tests/integration/auth_ldap_test.go @@ -11,12 +11,14 @@ import ( "testing" "code.gitea.io/gitea/models" + auth_model "code.gitea.io/gitea/models/auth" "code.gitea.io/gitea/models/db" "code.gitea.io/gitea/models/organization" "code.gitea.io/gitea/models/unittest" user_model "code.gitea.io/gitea/models/user" "code.gitea.io/gitea/modules/translation" "code.gitea.io/gitea/services/auth" + "code.gitea.io/gitea/services/auth/source/ldap" "code.gitea.io/gitea/tests" "github.com/stretchr/testify/assert" @@ -102,13 +104,28 @@ func getLDAPServerHost() string { return host } -func addAuthSourceLDAP(t *testing.T, sshKeyAttribute string, groupMapParams ...string) { +func getLDAPServerPort() string { + port := os.Getenv("TEST_LDAP_PORT") + if len(port) == 0 { + port = "389" + } + return port +} + +func addAuthSourceLDAP(t *testing.T, sshKeyAttribute, groupFilter string, groupMapParams ...string) { groupTeamMapRemoval := "off" groupTeamMap := "" if len(groupMapParams) == 2 { groupTeamMapRemoval = groupMapParams[0] groupTeamMap = groupMapParams[1] } + + // Modify user filter to test group filter explicitly + userFilter := "(&(objectClass=inetOrgPerson)(memberOf=cn=git,ou=people,dc=planetexpress,dc=com)(uid=%s))" + if groupFilter != "" { + userFilter = "(&(objectClass=inetOrgPerson)(uid=%s))" + } + session := loginUser(t, "user1") csrf := GetCSRF(t, session, "/admin/auths/new") req := NewRequestWithValues(t, "POST", "/admin/auths/new", map[string]string{ @@ -116,11 +133,11 @@ func addAuthSourceLDAP(t *testing.T, sshKeyAttribute string, groupMapParams ...s "type": "2", "name": "ldap", "host": getLDAPServerHost(), - "port": "389", + "port": getLDAPServerPort(), "bind_dn": "uid=gitea,ou=service,dc=planetexpress,dc=com", "bind_password": "password", "user_base": "ou=people,dc=planetexpress,dc=com", - "filter": 
"(&(objectClass=inetOrgPerson)(memberOf=cn=git,ou=people,dc=planetexpress,dc=com)(uid=%s))", + "filter": userFilter, "admin_filter": "(memberOf=cn=admin_staff,ou=people,dc=planetexpress,dc=com)", "restricted_filter": "(uid=leela)", "attribute_username": "uid", @@ -133,6 +150,7 @@ func addAuthSourceLDAP(t *testing.T, sshKeyAttribute string, groupMapParams ...s "groups_enabled": "on", "group_dn": "ou=people,dc=planetexpress,dc=com", "group_member_uid": "member", + "group_filter": groupFilter, "group_team_map": groupTeamMap, "group_team_map_removal": groupTeamMapRemoval, "user_uid": "DN", @@ -146,7 +164,7 @@ func TestLDAPUserSignin(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "") + addAuthSourceLDAP(t, "", "") u := gitLDAPUsers[0] @@ -163,7 +181,7 @@ func TestLDAPUserSignin(t *testing.T) { func TestLDAPAuthChange(t *testing.T) { defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "") + addAuthSourceLDAP(t, "", "") session := loginUser(t, "user1") req := NewRequest(t, "GET", "/admin/auths") @@ -221,7 +239,7 @@ func TestLDAPUserSync(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "") + addAuthSourceLDAP(t, "", "") auth.SyncExternalUsers(context.Background(), true) session := loginUser(t, "user1") @@ -266,13 +284,72 @@ func TestLDAPUserSync(t *testing.T) { } } +func TestLDAPUserSyncWithGroupFilter(t *testing.T) { + if skipLDAPTests() { + t.Skip() + return + } + defer tests.PrepareTestEnv(t)() + addAuthSourceLDAP(t, "", "(cn=git)") + + // Assert a user not a member of the LDAP group "cn=git" cannot login + // This test may look like TestLDAPUserSigninFailed but it is not. + // The later test uses user filter containing group membership filter (memberOf) + // This test is for the case when LDAP user records may not be linked with + // all groups the user is a member of, the user filter is modified accordingly inside + // the addAuthSourceLDAP based on the value of the groupFilter + u := otherLDAPUsers[0] + testLoginFailed(t, u.UserName, u.Password, translation.NewLocale("en-US").Tr("form.username_password_incorrect")) + + auth.SyncExternalUsers(context.Background(), true) + + // Assert members of LDAP group "cn=git" are added + for _, gitLDAPUser := range gitLDAPUsers { + unittest.BeanExists(t, &user_model.User{ + Name: gitLDAPUser.UserName, + }) + } + + // Assert everyone else is not added + for _, gitLDAPUser := range otherLDAPUsers { + unittest.AssertNotExistsBean(t, &user_model.User{ + Name: gitLDAPUser.UserName, + }) + } + + ldapSource := unittest.AssertExistsAndLoadBean(t, &auth_model.Source{ + Name: "ldap", + }) + ldapConfig := ldapSource.Cfg.(*ldap.Source) + ldapConfig.GroupFilter = "(cn=ship_crew)" + auth_model.UpdateSource(ldapSource) + + auth.SyncExternalUsers(context.Background(), true) + + for _, gitLDAPUser := range gitLDAPUsers { + if gitLDAPUser.UserName == "fry" || gitLDAPUser.UserName == "leela" || gitLDAPUser.UserName == "bender" { + // Assert members of the LDAP group "cn-ship_crew" are still active + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ + Name: gitLDAPUser.UserName, + }) + assert.True(t, user.IsActive, "User %s should be active", gitLDAPUser.UserName) + } else { + // Assert everyone else is inactive + user := unittest.AssertExistsAndLoadBean(t, &user_model.User{ + Name: gitLDAPUser.UserName, + }) + assert.False(t, user.IsActive, "User %s should be inactive", gitLDAPUser.UserName) + } + } +} + func TestLDAPUserSigninFailed(t *testing.T) { if skipLDAPTests() { t.Skip() return } defer 
tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "") + addAuthSourceLDAP(t, "", "") u := otherLDAPUsers[0] testLoginFailed(t, u.UserName, u.Password, translation.NewLocale("en-US").Tr("form.username_password_incorrect")) @@ -284,7 +361,7 @@ func TestLDAPUserSSHKeySync(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "sshPublicKey") + addAuthSourceLDAP(t, "sshPublicKey", "") auth.SyncExternalUsers(context.Background(), true) @@ -317,7 +394,7 @@ func TestLDAPGroupTeamSyncAddMember(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "", "on", `{"cn=ship_crew,ou=people,dc=planetexpress,dc=com":{"org26": ["team11"]},"cn=admin_staff,ou=people,dc=planetexpress,dc=com": {"non-existent": ["non-existent"]}}`) + addAuthSourceLDAP(t, "", "", "on", `{"cn=ship_crew,ou=people,dc=planetexpress,dc=com":{"org26": ["team11"]},"cn=admin_staff,ou=people,dc=planetexpress,dc=com": {"non-existent": ["non-existent"]}}`) org, err := organization.GetOrgByName("org26") assert.NoError(t, err) team, err := organization.GetTeam(db.DefaultContext, org.ID, "team11") @@ -362,7 +439,7 @@ func TestLDAPGroupTeamSyncRemoveMember(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "", "on", `{"cn=dispatch,ou=people,dc=planetexpress,dc=com": {"org26": ["team11"]}}`) + addAuthSourceLDAP(t, "", "", "on", `{"cn=dispatch,ou=people,dc=planetexpress,dc=com": {"org26": ["team11"]}}`) org, err := organization.GetOrgByName("org26") assert.NoError(t, err) team, err := organization.GetTeam(db.DefaultContext, org.ID, "team11") @@ -398,7 +475,7 @@ func TestBrokenLDAPMapUserSignin(t *testing.T) { return } defer tests.PrepareTestEnv(t)() - addAuthSourceLDAP(t, "", "on", `{"NOT_A_VALID_JSON"["MISSING_DOUBLE_POINT"]}`) + addAuthSourceLDAP(t, "", "", "on", `{"NOT_A_VALID_JSON"["MISSING_DOUBLE_POINT"]}`) u := gitLDAPUsers[0] diff --git a/tests/integration/git_helper_for_declarative_test.go b/tests/integration/git_helper_for_declarative_test.go index 9e3ff9c4484d3..1e99783096ee6 100644 --- a/tests/integration/git_helper_for_declarative_test.go +++ b/tests/integration/git_helper_for_declarative_test.go @@ -154,16 +154,16 @@ func doGitAddRemote(dstPath, remoteName string, u *url.URL) func(*testing.T) { } } -func doGitPushTestRepository(dstPath string, args ...git.CmdArg) func(*testing.T) { +func doGitPushTestRepository(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, _, err := git.NewCommand(git.DefaultContext, append([]git.CmdArg{"push", "-u"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) + _, _, err := git.NewCommand(git.DefaultContext, "push", "-u").AddArguments(git.ToTrustedCmdArgs(args)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } -func doGitPushTestRepositoryFail(dstPath string, args ...git.CmdArg) func(*testing.T) { +func doGitPushTestRepositoryFail(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, _, err := git.NewCommand(git.DefaultContext, append([]git.CmdArg{"push"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) + _, _, err := git.NewCommand(git.DefaultContext, "push").AddArguments(git.ToTrustedCmdArgs(args)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.Error(t, err) } } @@ -175,23 +175,23 @@ func doGitCreateBranch(dstPath, branch string) func(*testing.T) { } } -func doGitCheckoutBranch(dstPath string, args ...git.CmdArg) func(*testing.T) { +func doGitCheckoutBranch(dstPath string, args ...string) func(*testing.T) { return func(t 
*testing.T) { - _, _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "checkout"), args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) + _, _, err := git.NewCommandContextNoGlobals(git.DefaultContext, git.AllowLFSFiltersArgs()...).AddArguments("checkout").AddArguments(git.ToTrustedCmdArgs(args)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } -func doGitMerge(dstPath string, args ...git.CmdArg) func(*testing.T) { +func doGitMerge(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, _, err := git.NewCommand(git.DefaultContext, append([]git.CmdArg{"merge"}, args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) + _, _, err := git.NewCommand(git.DefaultContext, "merge").AddArguments(git.ToTrustedCmdArgs(args)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } -func doGitPull(dstPath string, args ...git.CmdArg) func(*testing.T) { +func doGitPull(dstPath string, args ...string) func(*testing.T) { return func(t *testing.T) { - _, _, err := git.NewCommandNoGlobals(append(append(git.AllowLFSFiltersArgs(), "pull"), args...)...).RunStdString(&git.RunOpts{Dir: dstPath}) + _, _, err := git.NewCommandContextNoGlobals(git.DefaultContext, git.AllowLFSFiltersArgs()...).AddArguments("pull").AddArguments(git.ToTrustedCmdArgs(args)...).RunStdString(&git.RunOpts{Dir: dstPath}) assert.NoError(t, err) } } diff --git a/tests/integration/git_test.go b/tests/integration/git_test.go index a11bad21b7b08..420a8676b9ed6 100644 --- a/tests/integration/git_test.go +++ b/tests/integration/git_test.go @@ -509,7 +509,7 @@ func doCreatePRAndSetManuallyMerged(ctx, baseCtx APITestContext, dstPath, baseBr })) t.Run("CreateHeadBranch", doGitCreateBranch(dstPath, headBranch)) - t.Run("PushToHeadBranch", doGitPushTestRepository(dstPath, "origin", git.CmdArgCheck(headBranch))) + t.Run("PushToHeadBranch", doGitPushTestRepository(dstPath, "origin", headBranch)) t.Run("CreateEmptyPullRequest", func(t *testing.T) { pr, err = doAPICreatePullRequest(ctx, baseCtx.Username, baseCtx.Reponame, baseBranch, headBranch)(t) assert.NoError(t, err) diff --git a/tests/integration/pull_merge_test.go b/tests/integration/pull_merge_test.go index e80822a7520af..090a27c39efbc 100644 --- a/tests/integration/pull_merge_test.go +++ b/tests/integration/pull_merge_test.go @@ -287,7 +287,7 @@ func TestCantMergeUnrelated(t *testing.T) { assert.NoError(t, err) sha := strings.TrimSpace(stdout.String()) - _, _, err = git.NewCommand(git.DefaultContext, "update-index", "--add", "--replace", "--cacheinfo", "100644", git.CmdArgCheck(sha), "somewher-over-the-rainbow").RunStdString(&git.RunOpts{Dir: path}) + _, _, err = git.NewCommand(git.DefaultContext, "update-index", "--add", "--replace", "--cacheinfo").AddDynamicArguments("100644", sha, "somewher-over-the-rainbow").RunStdString(&git.RunOpts{Dir: path}) assert.NoError(t, err) treeSha, _, err := git.NewCommand(git.DefaultContext, "write-tree").RunStdString(&git.RunOpts{Dir: path}) diff --git a/tests/mysql.ini.tmpl b/tests/mysql.ini.tmpl index b7ed98c34e91c..1dd7bfab2a49c 100644 --- a/tests/mysql.ini.tmpl +++ b/tests/mysql.ini.tmpl @@ -126,6 +126,7 @@ INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTU1NTE2MTh9.h ENABLED = true [email.incoming] +; temporarily disabled because the incoming mail tests are flaky: the IMAP server is sometimes not ready in time during integration tests.
ENABLED = false HOST = smtpimap PORT = 993 diff --git a/web_src/js/features/captcha.js b/web_src/js/features/captcha.js new file mode 100644 index 0000000000000..3da5dbda41854 --- /dev/null +++ b/web_src/js/features/captcha.js @@ -0,0 +1,51 @@ +import {isDarkTheme} from '../utils.js'; + +export async function initCaptcha() { + const captchaEl = document.querySelector('#captcha'); + if (!captchaEl) return; + + const siteKey = captchaEl.getAttribute('data-sitekey'); + const isDark = isDarkTheme(); + + const params = { + sitekey: siteKey, + theme: isDark ? 'dark' : 'light' + }; + + switch (captchaEl.getAttribute('data-captcha-type')) { + case 'g-recaptcha': { + if (window.grecaptcha) { + window.grecaptcha.ready(() => { + window.grecaptcha.render(captchaEl, params); + }); + } + break; + } + case 'cf-turnstile': { + if (window.turnstile) { + window.turnstile.render(captchaEl, params); + } + break; + } + case 'h-captcha': { + if (window.hcaptcha) { + window.hcaptcha.render(captchaEl, params); + } + break; + } + case 'm-captcha': { + const {default: mCaptcha} = await import(/* webpackChunkName: "mcaptcha-vanilla-glue" */'@mcaptcha/vanilla-glue'); + mCaptcha.INPUT_NAME = 'm-captcha-response'; + const instanceURL = captchaEl.getAttribute('data-instance-url'); + + mCaptcha.default({ + siteKey: { + instanceUrl: new URL(instanceURL), + key: siteKey, + } + }); + break; + } + default: + } +} diff --git a/web_src/js/features/mcaptcha.js b/web_src/js/features/mcaptcha.js deleted file mode 100644 index 725e2e28acf11..0000000000000 --- a/web_src/js/features/mcaptcha.js +++ /dev/null @@ -1,16 +0,0 @@ -export async function initMcaptcha() { - const mCaptchaEl = document.querySelector('.m-captcha'); - if (!mCaptchaEl) return; - - const {default: mCaptcha} = await import(/* webpackChunkName: "mcaptcha-vanilla-glue" */'@mcaptcha/vanilla-glue'); - mCaptcha.INPUT_NAME = 'm-captcha-response'; - const siteKey = mCaptchaEl.getAttribute('data-sitekey'); - const instanceURL = mCaptchaEl.getAttribute('data-instance-url'); - - mCaptcha.default({ - siteKey: { - instanceUrl: new URL(instanceURL), - key: siteKey, - } - }); -} diff --git a/web_src/js/features/repo-findfile.js b/web_src/js/features/repo-findfile.js index 7b8833e793c69..f279dfeeb73aa 100644 --- a/web_src/js/features/repo-findfile.js +++ b/web_src/js/features/repo-findfile.js @@ -72,6 +72,10 @@ export function filterRepoFilesWeighted(files, filter) { return filterResult; } +export function escapePath(s) { + return s.split('/').map(encodeURIComponent).join('/'); +} + function filterRepoFiles(filter) { const treeLink = $repoFindFileInput.attr('data-url-tree-link'); $repoFindFileTableBody.empty(); @@ -83,7 +87,7 @@ function filterRepoFiles(filter) { for (const r of filterResult) { const $row = $(tmplRow); const $a = $row.find('a'); - $a.attr('href', `${treeLink}/${r.matchResult.join('')}`); + $a.attr('href', `${treeLink}/${escapePath(r.matchResult.join(''))}`); const $octiconFile = $(svg('octicon-file')).addClass('mr-3'); $a.append($octiconFile); // if the target file path is "abc/xyz", to search "bx", then the matchResult is ['a', 'b', 'c/', 'x', 'yz'] diff --git a/web_src/js/features/repo-findfile.test.js b/web_src/js/features/repo-findfile.test.js index a90b0bf0a2cab..50321853960be 100644 --- a/web_src/js/features/repo-findfile.test.js +++ b/web_src/js/features/repo-findfile.test.js @@ -1,5 +1,5 @@ import {describe, expect, test} from 'vitest'; -import {strSubMatch, calcMatchedWeight, filterRepoFilesWeighted} from './repo-findfile.js'; +import {strSubMatch, 
calcMatchedWeight, filterRepoFilesWeighted, escapePath} from './repo-findfile.js'; describe('Repo Find Files', () => { test('strSubMatch', () => { @@ -32,4 +32,9 @@ describe('Repo Find Files', () => { expect(res).toHaveLength(2); expect(res[0].matchResult).toEqual(['', 'word', '.txt']); }); + + test('escapePath', () => { + expect(escapePath('a/b/c')).toEqual('a/b/c'); + expect(escapePath('a/b/ c')).toEqual('a/b/%20c'); + }); }); diff --git a/web_src/js/index.js b/web_src/js/index.js index 14483f3fa2541..74d80776b5ad0 100644 --- a/web_src/js/index.js +++ b/web_src/js/index.js @@ -88,8 +88,8 @@ import {initCommonOrganization} from './features/common-organization.js'; import {initRepoWikiForm} from './features/repo-wiki.js'; import {initRepoCommentForm, initRepository} from './features/repo-legacy.js'; import {initFormattingReplacements} from './features/formatting.js'; -import {initMcaptcha} from './features/mcaptcha.js'; import {initCopyContent} from './features/copycontent.js'; +import {initCaptcha} from './features/captcha.js'; import {initRepositoryActionView} from './components/RepoActionView.vue'; // Run time-critical code as soon as possible. This is safe to do because this @@ -191,7 +191,7 @@ $(document).ready(() => { initRepositoryActionView(); initCommitStatuses(); - initMcaptcha(); + initCaptcha(); initUserAuthLinkAccountView(); initUserAuthOauth2(); diff --git a/web_src/less/_base.less b/web_src/less/_base.less index 26fc83785b79f..e58bf53f5df9f 100644 --- a/web_src/less/_base.less +++ b/web_src/less/_base.less @@ -2200,9 +2200,7 @@ a.ui.label:hover { border: 1px solid var(--color-light-border); color: var(--color-text); } -.ui.tertiary.button { - border: none; -} + .page-content .ui.button { box-shadow: none !important; } @@ -2319,6 +2317,15 @@ a.ui.label:hover { color: var(--color-secondary-dark-8) !important; } +.ui.tertiary.button { + color: var(--color-text-light); + border: none; +} + +.ui.tertiary.button:hover { + color: var(--color-text); +} + .ui.primary.label, .ui.primary.labels .label { background-color: var(--color-primary) !important; diff --git a/web_src/less/_form.less b/web_src/less/_form.less index 3d2ec9fb8a9ee..1a1c1678f8b3a 100644 --- a/web_src/less/_form.less +++ b/web_src/less/_form.less @@ -220,18 +220,24 @@ textarea:focus, } @media @mediaMdAndUp { - .g-recaptcha, - .h-captcha { + .g-recaptcha-style, + .h-captcha-style { margin: 0 auto !important; width: 304px; padding-left: 30px; + + iframe { + border-radius: 5px !important; + width: 302px !important; + height: 76px !important; + } } } @media (max-height: 575px) { #rc-imageselect, - .g-recaptcha, - .h-captcha { + .g-recaptcha-style, + .h-captcha-style { transform: scale(.77); transform-origin: 0 0; } diff --git a/web_src/svg/gitea-cargo.svg b/web_src/svg/gitea-cargo.svg new file mode 100644 index 0000000000000..dbec107ad0dcf --- /dev/null +++ b/web_src/svg/gitea-cargo.svg @@ -0,0 +1,3 @@ + + + \ No newline at end of file