// Copyright 2014 The Gogs Authors. All rights reserved.
// Copyright 2018 The Gitea Authors. All rights reserved.
// SPDX-License-Identifier: MIT

package repo

import (
	"errors"
	"fmt"
	"html/template"
	"net/http"
	"net/url"
	"strconv"
	"strings"

	"code.gitea.io/gitea/models/db"
	issues_model "code.gitea.io/gitea/models/issues"
	"code.gitea.io/gitea/models/organization"
	access_model "code.gitea.io/gitea/models/perm/access"
	project_model "code.gitea.io/gitea/models/project"
	repo_model "code.gitea.io/gitea/models/repo"
	"code.gitea.io/gitea/models/unit"
	user_model "code.gitea.io/gitea/models/user"
	"code.gitea.io/gitea/modules/base"
	"code.gitea.io/gitea/modules/log"
	"code.gitea.io/gitea/modules/markup"
	"code.gitea.io/gitea/modules/markup/markdown"
	"code.gitea.io/gitea/modules/optional"
	api "code.gitea.io/gitea/modules/structs"
	"code.gitea.io/gitea/modules/templates"
	"code.gitea.io/gitea/modules/util"
	"code.gitea.io/gitea/modules/web"
	"code.gitea.io/gitea/routers/common"
	"code.gitea.io/gitea/services/context"
	"code.gitea.io/gitea/services/convert"
	"code.gitea.io/gitea/services/forms"
	issue_service "code.gitea.io/gitea/services/issue"
)

const (
	tplAttachment base.TplName = "repo/issue/view_content/attachments"

	tplIssues      base.TplName = "repo/issue/list"
	tplIssueNew    base.TplName = "repo/issue/new"
	tplIssueChoose base.TplName = "repo/issue/choose"
	tplIssueView   base.TplName = "repo/issue/view"

	tplReactions base.TplName = "repo/issue/view_content/reactions"

	issueTemplateKey      = "IssueTemplate"
	issueTemplateTitleKey = "IssueTemplateTitle"
)

// IssueTemplateCandidates lists the candidate file paths for a repository's issue template.
var IssueTemplateCandidates = []string{
	"ISSUE_TEMPLATE.md",
	"ISSUE_TEMPLATE.yaml",
	"ISSUE_TEMPLATE.yml",
	"issue_template.md",
	"issue_template.yaml",
	"issue_template.yml",
	".gitea/ISSUE_TEMPLATE.md",
	".gitea/ISSUE_TEMPLATE.yaml",
	".gitea/ISSUE_TEMPLATE.yml",
	".gitea/issue_template.md",
	".gitea/issue_template.yaml",
	".gitea/issue_template.yml",
	".github/ISSUE_TEMPLATE.md",
	".github/ISSUE_TEMPLATE.yaml",
	".github/ISSUE_TEMPLATE.yml",
	".github/issue_template.md",
	".github/issue_template.yaml",
	".github/issue_template.yml",
}

// MustAllowUserComment checks whether an issue is locked.
// If it is locked, only users who can write to the repository (or site admins)
// are allowed to comment; otherwise the request is rejected.
func MustAllowUserComment(ctx *context.Context) {
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}

	if issue.IsLocked && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) && !ctx.Doer.IsAdmin {
		ctx.Flash.Error(ctx.Tr("repo.issues.comment_on_locked"))
		ctx.Redirect(issue.Link())
		return
	}
}

// MustEnableIssues checks whether the repository uses internal issues;
// if an external tracker is configured instead, the request is redirected to it.
func MustEnableIssues(ctx *context.Context) {
	if !ctx.Repo.CanRead(unit.TypeIssues) &&
		!ctx.Repo.CanRead(unit.TypeExternalTracker) {
		ctx.NotFound("MustEnableIssues", nil)
		return
	}

	unit, err := ctx.Repo.Repository.GetUnit(ctx, unit.TypeExternalTracker)
	if err == nil {
		ctx.Redirect(unit.ExternalTrackerConfig().ExternalTrackerURL)
		return
	}
}

// MustAllowPulls checks whether the repository has pull requests enabled and
// the current user is allowed to view them.
func MustAllowPulls(ctx *context.Context) {
	if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
		ctx.NotFound("MustAllowPulls", nil)
		return
	}

	// The user can send a pull request if they own a forked repository.
	if ctx.IsSigned && repo_model.HasForkedRepo(ctx, ctx.Doer.ID, ctx.Repo.Repository.ID) {
		ctx.Repo.PullRequest.Allowed = true
		ctx.Repo.PullRequest.HeadInfoSubURL = url.PathEscape(ctx.Doer.Name) + ":" + util.PathEscapeSegments(ctx.Repo.BranchName)
	}
}

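// retrieveProjectsInternal collects the open and closed projects that issues
// of the given repository may be attached to, taking both repository-level and
// owner-level projects into account according to the projects unit settings.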
func retrieveProjectsInternal(ctx *context.Context, repo *repo_model.Repository) (open, closed []*project_model.Project) {
	// Distinguish whether the owner of the repository
	// is an individual or an organization
	repoOwnerType := project_model.TypeIndividual
	if repo.Owner.IsOrganization() {
		repoOwnerType = project_model.TypeOrganization
	}

	projectsUnit := repo.MustGetUnit(ctx, unit.TypeProjects)

	var openProjects []*project_model.Project
	var closedProjects []*project_model.Project
	var err error

	if projectsUnit.ProjectsConfig().IsProjectsAllowed(repo_model.ProjectsModeRepo) {
		openProjects, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
			ListOptions: db.ListOptionsAll,
			RepoID:      repo.ID,
			IsClosed:    optional.Some(false),
			Type:        project_model.TypeRepository,
		})
		if err != nil {
			ctx.ServerError("GetProjects", err)
			return nil, nil
		}
		closedProjects, err = db.Find[project_model.Project](ctx, project_model.SearchOptions{
			ListOptions: db.ListOptionsAll,
			RepoID:      repo.ID,
			IsClosed:    optional.Some(true),
			Type:        project_model.TypeRepository,
		})
		if err != nil {
			ctx.ServerError("GetProjects", err)
			return nil, nil
		}
	}

	if projectsUnit.ProjectsConfig().IsProjectsAllowed(repo_model.ProjectsModeOwner) {
		openProjects2, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
			ListOptions: db.ListOptionsAll,
			OwnerID:     repo.OwnerID,
			IsClosed:    optional.Some(false),
			Type:        repoOwnerType,
		})
		if err != nil {
			ctx.ServerError("GetProjects", err)
			return nil, nil
		}
		openProjects = append(openProjects, openProjects2...)
		closedProjects2, err := db.Find[project_model.Project](ctx, project_model.SearchOptions{
			ListOptions: db.ListOptionsAll,
			OwnerID:     repo.OwnerID,
			IsClosed:    optional.Some(true),
			Type:        repoOwnerType,
		})
		if err != nil {
			ctx.ServerError("GetProjects", err)
			return nil, nil
		}
		closedProjects = append(closedProjects, closedProjects2...)
	}
	return openProjects, closedProjects
}

// GetActionIssue will return the issue which is used in the context.
func GetActionIssue(ctx *context.Context) *issues_model.Issue {
	issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64(":index"))
	if err != nil {
		ctx.NotFoundOrServerError("GetIssueByIndex", issues_model.IsErrIssueNotExist, err)
		return nil
	}
	issue.Repo = ctx.Repo.Repository
	checkIssueRights(ctx, issue)
	if ctx.Written() {
		return nil
	}
	if err = issue.LoadAttributes(ctx); err != nil {
		ctx.ServerError("LoadAttributes", err)
		return nil
	}
	return issue
}

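// checkIssueRights responds with "not found" when the issue belongs to a unit
// (issues or pull requests) that the current user is not allowed to read.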
func checkIssueRights(ctx *context.Context, issue *issues_model.Issue) {
	if issue.IsPull && !ctx.Repo.CanRead(unit.TypePullRequests) ||
		!issue.IsPull && !ctx.Repo.CanRead(unit.TypeIssues) {
		ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
	}
}

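// getActionIssues loads the issues listed in the "issue_ids" form value and
// verifies that each of them belongs to the current repository and to a unit
// the current user is allowed to read.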
func getActionIssues(ctx *context.Context) issues_model.IssueList {
	commaSeparatedIssueIDs := ctx.FormString("issue_ids")
	if len(commaSeparatedIssueIDs) == 0 {
		return nil
	}
	issueIDs := make([]int64, 0, 10)
	for _, stringIssueID := range strings.Split(commaSeparatedIssueIDs, ",") {
		issueID, err := strconv.ParseInt(stringIssueID, 10, 64)
		if err != nil {
			ctx.ServerError("ParseInt", err)
			return nil
		}
		issueIDs = append(issueIDs, issueID)
	}
	issues, err := issues_model.GetIssuesByIDs(ctx, issueIDs)
	if err != nil {
		ctx.ServerError("GetIssuesByIDs", err)
		return nil
	}
	// Check access rights for all issues
	issueUnitEnabled := ctx.Repo.CanRead(unit.TypeIssues)
	prUnitEnabled := ctx.Repo.CanRead(unit.TypePullRequests)
	for _, issue := range issues {
		if issue.RepoID != ctx.Repo.Repository.ID {
			ctx.NotFound("some issue's RepoID is incorrect", errors.New("some issue's RepoID is incorrect"))
			return nil
		}
		if issue.IsPull && !prUnitEnabled || !issue.IsPull && !issueUnitEnabled {
			ctx.NotFound("IssueOrPullRequestUnitNotAllowed", nil)
			return nil
		}
		if err = issue.LoadAttributes(ctx); err != nil {
			ctx.ServerError("LoadAttributes", err)
			return nil
		}
	}
	return issues
}

// GetIssueInfo gets an issue of a repository
func GetIssueInfo(ctx *context.Context) {
	issue, err := issues_model.GetIssueWithAttrsByIndex(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64(":index"))
	if err != nil {
		if issues_model.IsErrIssueNotExist(err) {
			ctx.Error(http.StatusNotFound)
		} else {
			ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
		}
		return
	}

	if issue.IsPull {
		// Need to check if Pulls are enabled and we can read Pulls
		if !ctx.Repo.Repository.CanEnablePulls() || !ctx.Repo.CanRead(unit.TypePullRequests) {
			ctx.Error(http.StatusNotFound)
			return
		}
	} else {
		// Need to check if Issues are enabled and we can read Issues
		if !ctx.Repo.CanRead(unit.TypeIssues) {
			ctx.Error(http.StatusNotFound)
			return
		}
	}

	ctx.JSON(http.StatusOK, map[string]any{
		"convertedIssue": convert.ToIssue(ctx, ctx.Doer, issue),
		"renderedLabels": templates.NewRenderUtils(ctx).RenderLabels(issue.Labels, ctx.Repo.RepoLink, issue),
	})
}

// UpdateIssueTitle changes the issue's title
func UpdateIssueTitle(ctx *context.Context) {
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}

	if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
		ctx.Error(http.StatusForbidden)
		return
	}

	title := ctx.FormTrim("title")
	if len(title) == 0 {
		ctx.Error(http.StatusNoContent)
		return
	}

	if err := issue_service.ChangeTitle(ctx, issue, ctx.Doer, title); err != nil {
		ctx.ServerError("ChangeTitle", err)
		return
	}

	ctx.JSON(http.StatusOK, map[string]any{
		"title": issue.Title,
	})
}

// UpdateIssueRef changes the issue's ref (branch)
func UpdateIssueRef(ctx *context.Context) {
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}

	if !ctx.IsSigned || (!issue.IsPoster(ctx.Doer.ID) && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) || issue.IsPull {
		ctx.Error(http.StatusForbidden)
		return
	}

	ref := ctx.FormTrim("ref")

	if err := issue_service.ChangeIssueRef(ctx, issue, ctx.Doer, ref); err != nil {
		ctx.ServerError("ChangeRef", err)
		return
	}

	ctx.JSON(http.StatusOK, map[string]any{
		"ref": ref,
	})
}

// UpdateIssueContent changes the issue's content
func UpdateIssueContent(ctx *context.Context) {
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}

	if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull)) {
		ctx.Error(http.StatusForbidden)
		return
	}

	if err := issue_service.ChangeContent(ctx, issue, ctx.Doer, ctx.Req.FormValue("content"), ctx.FormInt("content_version")); err != nil {
		if errors.Is(err, user_model.ErrBlockedUser) {
			ctx.JSONError(ctx.Tr("repo.issues.edit.blocked_user"))
		} else if errors.Is(err, issues_model.ErrIssueAlreadyChanged) {
			if issue.IsPull {
				ctx.JSONError(ctx.Tr("repo.pulls.edit.already_changed"))
			} else {
				ctx.JSONError(ctx.Tr("repo.issues.edit.already_changed"))
			}
		} else {
			ctx.ServerError("ChangeContent", err)
		}
		return
	}

	// when the update request doesn't intend to update attachments (eg: change checkbox state), ignore attachment updates
	if !ctx.FormBool("ignore_attachments") {
		if err := updateAttachments(ctx, issue, ctx.FormStrings("files[]")); err != nil {
			ctx.ServerError("UpdateAttachments", err)
			return
		}
	}

	content, err := markdown.RenderString(markup.NewRenderContext(ctx).
		WithLinks(markup.Links{Base: ctx.FormString("context")}).
		WithMetas(ctx.Repo.Repository.ComposeMetas(ctx)).
		WithGitRepo(ctx.Repo.GitRepo).
		WithRepoFacade(ctx.Repo.Repository),
		issue.Content)
	if err != nil {
		ctx.ServerError("RenderString", err)
		return
	}

	ctx.JSON(http.StatusOK, map[string]any{
		"content":        content,
		"contentVersion": issue.ContentVersion,
		"attachments":    attachmentsHTML(ctx, issue.Attachments, issue.Content),
	})
}

// UpdateIssueDeadline updates an issue deadline
func UpdateIssueDeadline(ctx *context.Context) {
	issue, err := issues_model.GetIssueByIndex(ctx, ctx.Repo.Repository.ID, ctx.PathParamInt64(":index"))
	if err != nil {
		if issues_model.IsErrIssueNotExist(err) {
			ctx.NotFound("GetIssueByIndex", err)
		} else {
			ctx.Error(http.StatusInternalServerError, "GetIssueByIndex", err.Error())
		}
		return
	}

	if !ctx.Repo.CanWriteIssuesOrPulls(issue.IsPull) {
		ctx.Error(http.StatusForbidden, "", "Not repo writer")
		return
	}

	deadlineUnix, _ := common.ParseDeadlineDateToEndOfDay(ctx.FormString("deadline"))
	if err := issues_model.UpdateIssueDeadline(ctx, issue, deadlineUnix, ctx.Doer); err != nil {
		ctx.Error(http.StatusInternalServerError, "UpdateIssueDeadline", err.Error())
		return
	}

	ctx.JSONRedirect("")
}

// UpdateIssueMilestone changes the issue's milestone
func UpdateIssueMilestone(ctx *context.Context) {
	issues := getActionIssues(ctx)
	if ctx.Written() {
		return
	}

	milestoneID := ctx.FormInt64("id")
	for _, issue := range issues {
		oldMilestoneID := issue.MilestoneID
		if oldMilestoneID == milestoneID {
			continue
		}
		issue.MilestoneID = milestoneID
		if err := issue_service.ChangeMilestoneAssign(ctx, issue, ctx.Doer, oldMilestoneID); err != nil {
			ctx.ServerError("ChangeMilestoneAssign", err)
			return
		}
	}

	ctx.JSONOK()
}

// UpdateIssueAssignee changes the assignee of an issue or pull request
func UpdateIssueAssignee(ctx *context.Context) {
	issues := getActionIssues(ctx)
	if ctx.Written() {
		return
	}

	assigneeID := ctx.FormInt64("id")
	action := ctx.FormString("action")

	for _, issue := range issues {
		switch action {
		case "clear":
			if err := issue_service.DeleteNotPassedAssignee(ctx, issue, ctx.Doer, []*user_model.User{}); err != nil {
				ctx.ServerError("ClearAssignees", err)
				return
			}
		default:
			assignee, err := user_model.GetUserByID(ctx, assigneeID)
			if err != nil {
				ctx.ServerError("GetUserByID", err)
				return
			}

			valid, err := access_model.CanBeAssigned(ctx, assignee, issue.Repo, issue.IsPull)
			if err != nil {
				ctx.ServerError("canBeAssigned", err)
				return
			}
			if !valid {
				ctx.ServerError("canBeAssigned", repo_model.ErrUserDoesNotHaveAccessToRepo{UserID: assigneeID, RepoName: issue.Repo.Name})
				return
			}

			_, _, err = issue_service.ToggleAssigneeWithNotify(ctx, issue, ctx.Doer, assigneeID)
			if err != nil {
				ctx.ServerError("ToggleAssignee", err)
				return
			}
		}
	}
	ctx.JSONOK()
}

// ChangeIssueReaction creates or removes a reaction on an issue
func ChangeIssueReaction(ctx *context.Context) {
	form := web.GetForm(ctx).(*forms.ReactionForm)
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}

	if !ctx.IsSigned || (ctx.Doer.ID != issue.PosterID && !ctx.Repo.CanReadIssuesOrPulls(issue.IsPull)) {
		if log.IsTrace() {
			if ctx.IsSigned {
				issueType := "issues"
				if issue.IsPull {
					issueType = "pulls"
				}
				log.Trace("Permission Denied: User %-v not the Poster (ID: %d) and cannot read %s in Repo %-v.\n"+
					"User in Repo has Permissions: %-+v",
					ctx.Doer,
					issue.PosterID,
					issueType,
					ctx.Repo.Repository,
					ctx.Repo.Permission)
			} else {
				log.Trace("Permission Denied: Not logged in")
			}
		}

		ctx.Error(http.StatusForbidden)
		return
	}

	if ctx.HasError() {
		ctx.ServerError("ChangeIssueReaction", errors.New(ctx.GetErrMsg()))
		return
	}

	switch ctx.PathParam(":action") {
	case "react":
		reaction, err := issue_service.CreateIssueReaction(ctx, ctx.Doer, issue, form.Content)
		if err != nil {
			if issues_model.IsErrForbiddenIssueReaction(err) || errors.Is(err, user_model.ErrBlockedUser) {
				ctx.ServerError("ChangeIssueReaction", err)
				return
			}
			log.Info("CreateIssueReaction: %s", err)
			break
		}
		// Reload new reactions
		issue.Reactions = nil
		if err = issue.LoadAttributes(ctx); err != nil {
			log.Info("issue.LoadAttributes: %s", err)
			break
		}

		log.Trace("Reaction for issue created: %d/%d/%d", ctx.Repo.Repository.ID, issue.ID, reaction.ID)
	case "unreact":
		if err := issues_model.DeleteIssueReaction(ctx, ctx.Doer.ID, issue.ID, form.Content); err != nil {
			ctx.ServerError("DeleteIssueReaction", err)
			return
		}

		// Reload new reactions
		issue.Reactions = nil
		if err := issue.LoadAttributes(ctx); err != nil {
			log.Info("issue.LoadAttributes: %s", err)
			break
		}

		log.Trace("Reaction for issue removed: %d/%d", ctx.Repo.Repository.ID, issue.ID)
	default:
		ctx.NotFound(fmt.Sprintf("Unknown action %s", ctx.PathParam(":action")), nil)
		return
	}

	if len(issue.Reactions) == 0 {
		ctx.JSON(http.StatusOK, map[string]any{
			"empty": true,
			"html":  "",
		})
		return
	}

	html, err := ctx.RenderToHTML(tplReactions, map[string]any{
		"ActionURL": fmt.Sprintf("%s/issues/%d/reactions", ctx.Repo.RepoLink, issue.Index),
		"Reactions": issue.Reactions.GroupByType(),
	})
	if err != nil {
		ctx.ServerError("ChangeIssueReaction.HTMLString", err)
		return
	}
	ctx.JSON(http.StatusOK, map[string]any{
		"html": html,
	})
}

// GetIssueAttachments returns attachments for the issue
func GetIssueAttachments(ctx *context.Context) {
	issue := GetActionIssue(ctx)
	if ctx.Written() {
		return
	}
	attachments := make([]*api.Attachment, len(issue.Attachments))
	for i := 0; i < len(issue.Attachments); i++ {
		attachments[i] = convert.ToAttachment(ctx.Repo.Repository, issue.Attachments[i])
	}
	ctx.JSON(http.StatusOK, attachments)
}

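// updateAttachments reconciles the attachments of an issue or comment with the
// given list of attachment UUIDs: attachments that are no longer listed are
// deleted, newly listed ones are bound to the item, and the item's attachment
// list is reloaded afterwards.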
func updateAttachments(ctx *context.Context, item any, files []string) error {
	var attachments []*repo_model.Attachment
	switch content := item.(type) {
	case *issues_model.Issue:
		attachments = content.Attachments
	case *issues_model.Comment:
		attachments = content.Attachments
	default:
		return fmt.Errorf("unknown Type: %T", content)
	}
	for i := 0; i < len(attachments); i++ {
		if util.SliceContainsString(files, attachments[i].UUID) {
			continue
		}
		if err := repo_model.DeleteAttachment(ctx, attachments[i], true); err != nil {
			return err
		}
	}
	var err error
	if len(files) > 0 {
		switch content := item.(type) {
		case *issues_model.Issue:
			err = issues_model.UpdateIssueAttachments(ctx, content.ID, files)
		case *issues_model.Comment:
			err = content.UpdateAttachments(ctx, files)
		default:
			return fmt.Errorf("unknown Type: %T", content)
		}
		if err != nil {
			return err
		}
	}
	switch content := item.(type) {
	case *issues_model.Issue:
		content.Attachments, err = repo_model.GetAttachmentsByIssueID(ctx, content.ID)
	case *issues_model.Comment:
		content.Attachments, err = repo_model.GetAttachmentsByCommentID(ctx, content.ID)
	default:
		return fmt.Errorf("unknown Type: %T", content)
	}
	return err
}

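// attachmentsHTML renders the attachment list template for the given
// attachments and returns the resulting HTML fragment.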
func attachmentsHTML(ctx *context.Context, attachments []*repo_model.Attachment, content string) template.HTML {
	attachHTML, err := ctx.RenderToHTML(tplAttachment, map[string]any{
		"ctxData":     ctx.Data,
		"Attachments": attachments,
		"Content":     content,
	})
	if err != nil {
		ctx.ServerError("attachmentsHTML.HTMLString", err)
		return ""
	}
	return attachHTML
}

// handleTeamMentions stores in the template context all teams that the current user can mention
func handleTeamMentions(ctx *context.Context) {
	if ctx.Doer == nil || !ctx.Repo.Owner.IsOrganization() {
		return
	}

	var isAdmin bool
	var err error
	var teams []*organization.Team
	org := organization.OrgFromUser(ctx.Repo.Owner)
	// Admin has super access.
	if ctx.Doer.IsAdmin {
		isAdmin = true
	} else {
		isAdmin, err = org.IsOwnedBy(ctx, ctx.Doer.ID)
		if err != nil {
			ctx.ServerError("IsOwnedBy", err)
			return
		}
	}

	if isAdmin {
		teams, err = org.LoadTeams(ctx)
		if err != nil {
			ctx.ServerError("LoadTeams", err)
			return
		}
	} else {
		teams, err = org.GetUserTeams(ctx, ctx.Doer.ID)
		if err != nil {
			ctx.ServerError("GetUserTeams", err)
			return
		}
	}

	ctx.Data["MentionableTeams"] = teams
	ctx.Data["MentionableTeamsOrg"] = ctx.Repo.Owner.Name
	ctx.Data["MentionableTeamsOrgAvatar"] = ctx.Repo.Owner.AvatarLink(ctx)
}