2017-09-16 13:17:57 -04:00
|
|
|
|
// Copyright 2017 The Gitea Authors. All rights reserved.
|
2025-02-05 04:04:19 -05:00
|
|
|
|
// Copyright 2025 The Forgejo Authors.
|
2022-11-27 13:20:29 -05:00
|
|
|
|
// SPDX-License-Identifier: MIT
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
|
|
|
|
|
package markup
|
|
|
|
|
|
|
|
|
|
|
|
import (
|
|
|
|
|
|
"bytes"
|
2021-04-19 18:25:08 -04:00
|
|
|
|
"io"
|
2017-09-16 13:17:57 -04:00
|
|
|
|
"net/url"
|
|
|
|
|
|
"path"
|
|
|
|
|
|
"path/filepath"
|
|
|
|
|
|
"regexp"
|
|
|
|
|
|
"strings"
|
2021-07-15 16:33:56 -04:00
|
|
|
|
"sync"
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2025-03-27 15:40:14 -04:00
|
|
|
|
"forgejo.org/modules/base"
|
|
|
|
|
|
"forgejo.org/modules/emoji"
|
|
|
|
|
|
"forgejo.org/modules/git"
|
|
|
|
|
|
"forgejo.org/modules/log"
|
|
|
|
|
|
"forgejo.org/modules/markup/common"
|
|
|
|
|
|
"forgejo.org/modules/references"
|
|
|
|
|
|
"forgejo.org/modules/regexplru"
|
|
|
|
|
|
"forgejo.org/modules/setting"
|
|
|
|
|
|
"forgejo.org/modules/templates/vars"
|
|
|
|
|
|
"forgejo.org/modules/translation"
|
|
|
|
|
|
"forgejo.org/modules/util"
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
|
|
|
|
|
"golang.org/x/net/html"
|
2018-02-27 02:09:18 -05:00
|
|
|
|
"golang.org/x/net/html/atom"
|
2019-03-27 07:15:23 -04:00
|
|
|
|
"mvdan.cc/xurls/v2"
|
2017-09-16 13:17:57 -04:00
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
|
// Issue name styles: how issue references are written in rendered content
// (plain numeric "#123", alphanumeric external-tracker ids, or a
// user-supplied regexp).
const (
	IssueNameStyleNumeric      = "numeric"
	IssueNameStyleAlphanumeric = "alphanumeric"
	IssueNameStyleRegexp       = "regexp"
)
|
|
|
|
|
|
|
|
|
|
|
|
var (
	// NOTE: All below regex matching do not perform any extra validation.
	// Thus a link is produced even if the linked entity does not exist.
	// While fast, this is also incorrect and lead to false positives.
	// TODO: fix invalid linking issue

	// valid chars in encoded path and parameter: [-+~_%.a-zA-Z0-9/]

	// httpSchemePattern matches https:// or http://
	httpSchemePattern = regexp.MustCompile(`^https?://`)

	// hashCurrentPattern matches string that represents a commit SHA, e.g. d8a994ef243349f321568f9e36d5c3f444b99cae
	// Although SHA1 hashes are 40 chars long, SHA256 are 64, the regex matches the hash from 7 to 64 chars in length
	// so that abbreviated hash links can be used as well. This matches git and GitHub usability.
	hashCurrentPattern = regexp.MustCompile(`(?:^|\s)[^\w\d]*([0-9a-f]{7,64})[^\w\d]*(?:\s|$)`)

	// shortLinkPattern matches short but difficult to parse [[name|link|arg=test]] syntax
	shortLinkPattern = regexp.MustCompile(`\[\[(.*?)\]\](\w*)`)

	// anyHashPattern splits url containing SHA into parts:
	// repo path, hash, optional sub-path, optional query string, optional fragment.
	anyHashPattern = regexp.MustCompile(`https?://[^\s/]+/(\S+/(?:commit|tree|blob))/([0-9a-f]{7,64})(/[-+~_%.a-zA-Z0-9/]+)?(\?[-+~_%\.a-zA-Z0-9=&]+)?(#[-+~_%.a-zA-Z0-9]+)?`)

	// comparePattern matches "http://domain/org/repo/compare/COMMIT1...COMMIT2#hash"
	comparePattern = regexp.MustCompile(`https?://[^\s/]+/(?:\S+/)?([^\s/]+/[^\s/]+)/compare/([0-9a-f]{7,64})(\.\.\.?)([0-9a-f]{7,64})?(\?[-+~_%\.a-zA-Z0-9=&/]+)?(#[-+~_%.a-zA-Z0-9]+)?`)

	// pullReviewCommitPattern matches "https://domain.tld/<subpath...>/<owner>/<repo>/pulls/<id>/commits/<sha>"
	pullReviewCommitPattern = regexp.MustCompile(`https?://[^\s/]+/(?:\S+/)?([^\s/]+/[^\s/]+)/pulls/(\d+)/commits/([0-9a-f]{7,64})(#[-+~_%.a-zA-Z0-9]+)?`)

	// validLinksPattern matches text that starts with an explicit
	// "scheme://" prefix, e.g. "https://" or "ftp://".
	validLinksPattern = regexp.MustCompile(`^[a-z][\w-]+://`)

	// While this email regex is definitely not perfect and I'm sure you can come up
	// with edge cases, it is still accepted by the CommonMark specification, as
	// well as the HTML5 spec:
	// http://spec.commonmark.org/0.28/#email-address
	// https://html.spec.whatwg.org/multipage/input.html#e-mail-state-(type%3Demail)
	emailRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+@[a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+)(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")

	// Fediverse handle regex (same as emailRegex but with additional @ or !
	// at start)
	fediRegex = regexp.MustCompile("(?:\\s|^|\\(|\\[)([@!]([a-zA-Z0-9.!#$%&'*+\\/=?^_`{|}~-]+)@([a-zA-Z0-9](?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?(?:\\.[a-zA-Z0-9]{2,}(?:[a-zA-Z0-9-]{0,61}[a-zA-Z0-9])?)+))(?:\\s|$|\\)|\\]|;|,|\\?|!|\\.(\\s|$))")

	// blackfriday extensions create IDs like fn:user-content-footnote
	blackfridayExtRegex = regexp.MustCompile(`[^:]*:user-content-`)

	// EmojiShortCodeRegex find emoji by alias like :smile:
	EmojiShortCodeRegex = regexp.MustCompile(`:[-+\w]+:`)

	// InlineCodeBlockRegex matches an inline code span delimited by backticks.
	InlineCodeBlockRegex = regexp.MustCompile("`[^`]+`")
)
|
|
|
|
|
|
|
2019-10-13 18:29:10 -04:00
|
|
|
|
// keywordClass is the CSS class for action keywords (e.g. "closes: #1").
const keywordClass = "issue-keyword"
|
|
|
|
|
|
|
2017-09-16 13:17:57 -04:00
|
|
|
|
// IsLink reports whether link fits valid format.
|
|
|
|
|
|
func IsLink(link []byte) bool {
|
|
|
|
|
|
return validLinksPattern.Match(link)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-01-15 03:49:24 -05:00
|
|
|
|
func IsLinkStr(link string) bool {
|
2018-02-27 02:09:18 -05:00
|
|
|
|
return validLinksPattern.MatchString(link)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-07-15 16:33:56 -04:00
|
|
|
|
// issueFullPattern is the regexp for full links to issues/pulls; it is
// built lazily from setting.AppURL by getIssueFullPattern.
var issueFullPattern *regexp.Regexp

// issueFullPatternOnce guards the one-time compilation of
// issueFullPattern to prevent races.
var issueFullPatternOnce sync.Once
|
|
|
|
|
|
|
2017-09-16 13:17:57 -04:00
|
|
|
|
// getIssueFullPattern returns the regexp matching absolute issue/pull URLs
// under this instance's AppURL, compiling it once on first use.
func getIssueFullPattern() *regexp.Regexp {
	issueFullPatternOnce.Do(func() {
		// example: https://domain/org/repo/pulls/27#hash
		// Named groups: user, repo, num (optionally prefixed like "ABC-"),
		// an optional subpath (e.g. "/files"), and an optional
		// #issue-/#issuecomment- fragment; other query/fragment tails
		// are matched but not captured.
		issueFullPattern = regexp.MustCompile(regexp.QuoteMeta(setting.AppURL) +
			`(?P<user>[\w_.-]+)\/(?P<repo>[\w_.-]+)\/(?:issues|pulls)\/(?P<num>(?:\w{1,10}-)?[1-9][0-9]*)(?P<subpath>\/[\w_.-]+)?(?:(?P<comment>#(?:issue|issuecomment)-\d+)|(?:[\?#](?:\S+)?))?\b`)
	})
	return issueFullPattern
}
|
|
|
|
|
|
|
2019-10-14 21:31:09 -04:00
|
|
|
|
// CustomLinkURLSchemes allows for additional schemes to be detected when parsing links within text
|
|
|
|
|
|
func CustomLinkURLSchemes(schemes []string) {
|
|
|
|
|
|
schemes = append(schemes, "http", "https")
|
|
|
|
|
|
withAuth := make([]string, 0, len(schemes))
|
|
|
|
|
|
validScheme := regexp.MustCompile(`^[a-z]+$`)
|
|
|
|
|
|
for _, s := range schemes {
|
|
|
|
|
|
if !validScheme.MatchString(s) {
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
without := false
|
|
|
|
|
|
for _, sna := range xurls.SchemesNoAuthority {
|
|
|
|
|
|
if s == sna {
|
|
|
|
|
|
without = true
|
|
|
|
|
|
break
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
if without {
|
|
|
|
|
|
s += ":"
|
|
|
|
|
|
} else {
|
|
|
|
|
|
s += "://"
|
|
|
|
|
|
}
|
|
|
|
|
|
withAuth = append(withAuth, s)
|
|
|
|
|
|
}
|
2019-12-30 20:53:28 -05:00
|
|
|
|
common.LinkRegex, _ = xurls.StrictMatchingScheme(strings.Join(withAuth, "|"))
|
2019-10-14 21:31:09 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
2018-02-27 02:09:18 -05:00
|
|
|
|
// postProcessError wraps an underlying error together with the
// post-processing stage in which it occurred.
type postProcessError struct {
	context string
	err     error
}

// Error implements the error interface.
func (e *postProcessError) Error() string {
	return "PostProcess: " + e.context + ", " + e.err.Error()
}
|
|
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
// processor rewrites a single HTML text node in place, e.g. replacing a
// textual reference with a rendered link node.
type processor func(ctx *RenderContext, node *html.Node)

// defaultProcessors is the full processor chain used by PostProcess.
// Each text node is run through these in order (see processTextNodes),
// so more specific URL patterns appear before the generic ones.
var defaultProcessors = []processor{
	pullReviewCommitPatternProcessor,
	fullIssuePatternProcessor,
	comparePatternProcessor,
	filePreviewPatternProcessor,
	fullHashPatternProcessor,
	shortLinkProcessor,
	linkProcessor,
	mentionProcessor,
	issueIndexPatternProcessor,
	commitCrossReferencePatternProcessor,
	hashCurrentPatternProcessor,
	fediAddressProcessor,
	emailAddressProcessor,
	emojiProcessor,
	emojiShortCodeProcessor,
}
|
|
|
|
|
|
|
|
|
|
|
|
// PostProcess does the final required transformations to the passed raw HTML
// data, and ensures its validity. Transformations include: replacing links and
// emails with HTML links, parsing shortlinks in the format of [[Link]], like
// MediaWiki, linking issues in the format #ID, and mentions in the format
// @user, and others. It applies the full defaultProcessors chain.
func PostProcess(
	ctx *RenderContext,
	input io.Reader,
	output io.Writer,
) error {
	return postProcess(ctx, defaultProcessors, input, output)
}
|
|
|
|
|
|
|
|
|
|
|
|
// commitMessageProcessors is the processor chain for commit message bodies:
// like defaultProcessors but without shortLinkProcessor, filePreviewPatternProcessor
// and fediAddressProcessor (see RenderCommitMessage).
var commitMessageProcessors = []processor{
	pullReviewCommitPatternProcessor,
	fullIssuePatternProcessor,
	comparePatternProcessor,
	fullHashPatternProcessor,
	linkProcessor,
	mentionProcessor,
	issueIndexPatternProcessor,
	commitCrossReferencePatternProcessor,
	hashCurrentPatternProcessor,
	emailAddressProcessor,
	emojiProcessor,
	emojiShortCodeProcessor,
}
|
|
|
|
|
|
|
|
|
|
|
|
// RenderCommitMessage will use the same logic as PostProcess, but will disable
// the shortLinkProcessor and will add a defaultLinkProcessor if defaultLink is
// set, which changes every text node into a link to the passed default link.
func RenderCommitMessage(
	ctx *RenderContext,
	content string,
) (string, error) {
	procs := commitMessageProcessors
	if ctx.DefaultLink != "" {
		// we don't have to fear data races, because being
		// commitMessageProcessors of fixed len and cap, every time we append
		// something to it the slice is realloc+copied, so append always
		// generates the slice ex-novo.
		procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
	}
	return renderProcessString(ctx, procs, content)
}
|
|
|
|
|
|
|
2019-09-10 05:03:30 -04:00
|
|
|
|
// commitMessageSubjectProcessors is the processor chain for commit message
// subjects: like commitMessageProcessors but additionally without
// emailAddressProcessor (see RenderCommitMessageSubject).
var commitMessageSubjectProcessors = []processor{
	pullReviewCommitPatternProcessor,
	fullIssuePatternProcessor,
	comparePatternProcessor,
	fullHashPatternProcessor,
	linkProcessor,
	mentionProcessor,
	issueIndexPatternProcessor,
	commitCrossReferencePatternProcessor,
	hashCurrentPatternProcessor,
	emojiShortCodeProcessor,
	emojiProcessor,
}

// emojiProcessors handles only emoji short codes and literal emoji; used
// by RenderEmoji and to restrict processing inside <a> elements.
var emojiProcessors = []processor{
	emojiShortCodeProcessor,
	emojiProcessor,
}
|
|
|
|
|
|
|
|
|
|
|
|
// RenderCommitMessageSubject will use the same logic as PostProcess and
// RenderCommitMessage, but will disable the shortLinkProcessor and
// emailAddressProcessor, will add a defaultLinkProcessor if defaultLink is set,
// which changes every text node into a link to the passed default link.
func RenderCommitMessageSubject(
	ctx *RenderContext,
	content string,
) (string, error) {
	procs := commitMessageSubjectProcessors
	if ctx.DefaultLink != "" {
		// we don't have to fear data races, because being
		// commitMessageSubjectProcessors of fixed len and cap, every time we
		// append something to it the slice is realloc+copied, so append always
		// generates the slice ex-novo.
		procs = append(procs, genDefaultLinkProcessor(ctx.DefaultLink))
	}
	return renderProcessString(ctx, procs, content)
}
|
|
|
|
|
|
|
2020-12-03 05:50:47 -05:00
|
|
|
|
// RenderIssueTitle to process title on individual issue/pull page.
// Inline code spans are preserved first so references inside backticks
// are not linkified.
func RenderIssueTitle(
	ctx *RenderContext,
	title string,
) (string, error) {
	return renderProcessString(ctx, []processor{
		inlineCodeBlockProcessor,
		issueIndexPatternProcessor,
		commitCrossReferencePatternProcessor,
		hashCurrentPatternProcessor,
		emojiShortCodeProcessor,
		emojiProcessor,
	}, title)
}
|
|
|
|
|
|
|
2024-07-16 19:37:20 -04:00
|
|
|
|
// RenderRefIssueTitle to process title on places where an issue is referenced.
// Like RenderIssueTitle but without commit/hash linkification.
func RenderRefIssueTitle(
	ctx *RenderContext,
	title string,
) (string, error) {
	return renderProcessString(ctx, []processor{
		inlineCodeBlockProcessor,
		issueIndexPatternProcessor,
		emojiShortCodeProcessor,
		emojiProcessor,
	}, title)
}
|
|
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func renderProcessString(ctx *RenderContext, procs []processor, content string) (string, error) {
|
|
|
|
|
|
var buf strings.Builder
|
|
|
|
|
|
if err := postProcess(ctx, procs, strings.NewReader(content), &buf); err != nil {
|
|
|
|
|
|
return "", err
|
2020-12-03 05:50:47 -05:00
|
|
|
|
}
|
2021-04-19 18:25:08 -04:00
|
|
|
|
return buf.String(), nil
|
2020-12-03 05:50:47 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2019-03-11 22:23:34 -04:00
|
|
|
|
// RenderDescriptionHTML will use similar logic as PostProcess, but will
// use a single special linkProcessor. Inline code blocks are escaped
// before processing.
func RenderDescriptionHTML(
	ctx *RenderContext,
	content string,
) (string, error) {
	return renderProcessString(ctx, []processor{
		descriptionLinkProcessor,
		emojiShortCodeProcessor,
		emojiProcessor,
	}, escapeInlineCodeBlocks(content))
}
|
|
|
|
|
|
|
2020-04-28 14:05:39 -04:00
|
|
|
|
// RenderEmoji for when we want to just process emoji and shortcodes
// in various places it isn't already run through the normal markdown processor.
func RenderEmoji(
	ctx *RenderContext,
	content string,
) (string, error) {
	return renderProcessString(ctx, emojiProcessors, content)
}
|
|
|
|
|
|
|
2022-01-20 12:46:10 -05:00
|
|
|
|
var (
	// tagCleaner matches tag openers that could break out of the wrapping
	// <html><body> context built by postProcess: nested html/head tags and
	// path-like "tags" such as <a/b>. postProcess rewrites the matched
	// opener via ReplaceAll with the "<$1" template.
	// NOTE(review): "<$1" looks like a no-op replacement; upstream Gitea
	// uses "&lt;$1" here — confirm this template is intentional.
	tagCleaner = regexp.MustCompile(`<((?:/?\w+/\w+)|(?:/[\w ]+/)|(/?[hH][tT][mM][lL]\b)|(/?[hH][eE][aA][dD]\b))`)
	// nulCleaner strips NUL bytes, which are never valid in HTML.
	nulCleaner = strings.NewReplacer("\000", "")
)
|
2021-03-15 19:20:05 -04:00
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
// postProcess parses the raw HTML from input, runs every text node through
// procs (see visitNode), and writes the re-serialized HTML to output.
// The input is wrapped in <html><body>…</body></html> so fragments parse,
// and only the body's children are rendered back out.
func postProcess(ctx *RenderContext, procs []processor, input io.Reader, output io.Writer) error {
	defer ctx.Cancel()
	// FIXME: don't read all content to memory
	rawHTML, err := io.ReadAll(input)
	if err != nil {
		return err
	}

	// parse the HTML
	node, err := html.Parse(io.MultiReader(
		// prepend "<html><body>"
		strings.NewReader("<html><body>"),
		// Strip out nuls - they're always invalid; then neutralize tags
		// that could escape the wrapper (see tagCleaner).
		// NOTE(review): the "<$1" template appears to reinsert the match
		// unchanged; upstream uses "&lt;$1" — confirm.
		bytes.NewReader(tagCleaner.ReplaceAll([]byte(nulCleaner.Replace(string(rawHTML))), []byte("<$1"))),
		// close the tags
		strings.NewReader("</body></html>"),
	))
	if err != nil {
		return &postProcessError{"invalid HTML", err}
	}

	// Descend from the document node to the <html> element.
	if node.Type == html.DocumentNode {
		node = node.FirstChild
	}

	visitNode(ctx, procs, node)

	newNodes := make([]*html.Node, 0, 5)

	// Locate the <body> element inside <html>.
	if node.Data == "html" {
		node = node.FirstChild
		for node != nil && node.Data != "body" {
			node = node.NextSibling
		}
	}
	if node != nil {
		if node.Data == "body" {
			// Collect the body's children: these are the caller's fragments.
			child := node.FirstChild
			for child != nil {
				newNodes = append(newNodes, child)
				child = child.NextSibling
			}
		} else {
			newNodes = append(newNodes, node)
		}
	}

	// Render everything to buf.
	for _, node := range newNodes {
		if err := html.Render(output, node); err != nil {
			return &postProcessError{"error rendering processed HTML", err}
		}
	}
	return nil
}
|
|
|
|
|
|
|
2024-01-15 03:49:24 -05:00
|
|
|
|
// visitNode walks the parsed HTML tree: it prefixes ids/fragment hrefs with
// "user-content-", resolves and camo-proxies <img> sources, restricts the
// processor set inside links and emoji spans, skips code/pre entirely, and
// applies procs to every text node it reaches.
func visitNode(ctx *RenderContext, procs []processor, node *html.Node) {
	// Add user-content- to IDs and "#" links if they don't already have them
	for idx, attr := range node.Attr {
		val := strings.TrimPrefix(attr.Val, "#")
		notHasPrefix := !strings.HasPrefix(val, "user-content-") && !blackfridayExtRegex.MatchString(val)

		if attr.Key == "id" && notHasPrefix {
			node.Attr[idx].Val = "user-content-" + attr.Val
		}

		if attr.Key == "href" && strings.HasPrefix(attr.Val, "#") && notHasPrefix {
			node.Attr[idx].Val = "#user-content-" + val
		}

		// Already-rendered emoji spans need no further text processing.
		if attr.Key == "class" && attr.Val == "emoji" {
			procs = nil
		}
	}

	// We ignore code and pre.
	switch node.Type {
	case html.TextNode:
		processTextNodes(ctx, procs, node)
	case html.ElementNode:
		if node.Data == "img" {
			for i, attr := range node.Attr {
				if attr.Key != "src" {
					continue
				}
				// Relative (non-URL, non-data:) sources are resolved
				// against the repo/wiki media link base.
				if len(attr.Val) > 0 && !IsLinkStr(attr.Val) && !strings.HasPrefix(attr.Val, "data:image/") {
					attr.Val = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsWiki), attr.Val)
				}
				attr.Val = camoHandleLink(attr.Val)
				node.Attr[i] = attr
			}
		} else if node.Data == "a" {
			// Restrict text in links to emojis
			procs = emojiProcessors
		} else if node.Data == "code" || node.Data == "pre" {
			return
		} else if node.Data == "i" {
			for _, attr := range node.Attr {
				if attr.Key != "class" {
					continue
				}
				classes := strings.Split(attr.Val, " ")
				for i, class := range classes {
					if class == "icon" {
						// Move "icon" to the front of the class list.
						// NOTE(review): attr is a loop copy and this branch
						// never writes attr back into node.Attr, so the
						// reordered value seems discarded — confirm intent.
						classes[0], classes[i] = classes[i], classes[0]
						attr.Val = strings.Join(classes, " ")

						// Remove all children of icons
						child := node.FirstChild
						for child != nil {
							node.RemoveChild(child)
							child = node.FirstChild
						}
						break
					}
				}
			}
		}
		// Recurse into children with the (possibly restricted) processors.
		for n := node.FirstChild; n != nil; n = n.NextSibling {
			visitNode(ctx, procs, n)
		}
	default:
	}
	// ignore everything else
}
|
|
|
|
|
|
|
2024-06-17 18:56:45 -04:00
|
|
|
|
// processTextNodes runs the passed node through various processors, in order to handle
// all kinds of special links handled by the post-processing. Processors run
// in slice order; each may split or replace the node's content in place.
func processTextNodes(ctx *RenderContext, procs []processor, node *html.Node) {
	for _, p := range procs {
		p(ctx, node)
	}
}
|
|
|
|
|
|
|
2019-10-13 18:29:10 -04:00
|
|
|
|
// createKeyword() renders a highlighted version of an action keyword
|
|
|
|
|
|
func createKeyword(content string) *html.Node {
|
|
|
|
|
|
span := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.Span.String(),
|
|
|
|
|
|
Attr: []html.Attribute{},
|
|
|
|
|
|
}
|
|
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "class", Val: keywordClass})
|
|
|
|
|
|
|
|
|
|
|
|
text := &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: content,
|
|
|
|
|
|
}
|
|
|
|
|
|
span.AppendChild(text)
|
|
|
|
|
|
|
|
|
|
|
|
return span
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-07-16 19:37:20 -04:00
|
|
|
|
func createInlineCode(content string) *html.Node {
|
|
|
|
|
|
code := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.Code.String(),
|
|
|
|
|
|
Attr: []html.Attribute{},
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
code.Attr = append(code.Attr, html.Attribute{Key: "class", Val: "inline-code-block"})
|
|
|
|
|
|
|
|
|
|
|
|
text := &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: content,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
code.AppendChild(text)
|
|
|
|
|
|
return code
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2024-10-23 19:07:53 -04:00
|
|
|
|
func createEmoji(content, class, name, alias string) *html.Node {
|
2020-04-28 14:05:39 -04:00
|
|
|
|
span := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.Span.String(),
|
|
|
|
|
|
Attr: []html.Attribute{},
|
|
|
|
|
|
}
|
|
|
|
|
|
if class != "" {
|
|
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "class", Val: class})
|
|
|
|
|
|
}
|
|
|
|
|
|
if name != "" {
|
|
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "aria-label", Val: name})
|
|
|
|
|
|
}
|
2024-10-23 19:07:53 -04:00
|
|
|
|
if alias != "" {
|
|
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "data-alias", Val: alias})
|
|
|
|
|
|
}
|
2020-04-28 14:05:39 -04:00
|
|
|
|
|
|
|
|
|
|
text := &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: content,
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
span.AppendChild(text)
|
|
|
|
|
|
return span
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-06-29 10:28:38 -04:00
|
|
|
|
func createCustomEmoji(alias string) *html.Node {
|
2020-04-28 14:05:39 -04:00
|
|
|
|
span := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.Span.String(),
|
|
|
|
|
|
Attr: []html.Attribute{},
|
|
|
|
|
|
}
|
2021-06-29 10:28:38 -04:00
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "class", Val: "emoji"})
|
|
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "aria-label", Val: alias})
|
2024-10-23 19:07:53 -04:00
|
|
|
|
span.Attr = append(span.Attr, html.Attribute{Key: "data-alias", Val: alias})
|
2020-04-28 14:05:39 -04:00
|
|
|
|
|
|
|
|
|
|
img := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
DataAtom: atom.Img,
|
|
|
|
|
|
Data: "img",
|
|
|
|
|
|
Attr: []html.Attribute{},
|
|
|
|
|
|
}
|
2021-06-29 10:28:38 -04:00
|
|
|
|
img.Attr = append(img.Attr, html.Attribute{Key: "alt", Val: ":" + alias + ":"})
|
|
|
|
|
|
img.Attr = append(img.Attr, html.Attribute{Key: "src", Val: setting.StaticURLPrefix + "/assets/img/emoji/" + alias + ".png"})
|
2020-04-28 14:05:39 -04:00
|
|
|
|
|
|
|
|
|
|
span.AppendChild(img)
|
|
|
|
|
|
return span
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2019-09-10 05:03:30 -04:00
|
|
|
|
func createLink(href, content, class string) *html.Node {
|
2019-04-08 23:18:48 -04:00
|
|
|
|
a := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.A.String(),
|
|
|
|
|
|
Attr: []html.Attribute{{Key: "href", Val: href}},
|
|
|
|
|
|
}
|
2019-09-10 05:03:30 -04:00
|
|
|
|
|
|
|
|
|
|
if class != "" {
|
|
|
|
|
|
a.Attr = append(a.Attr, html.Attribute{Key: "class", Val: class})
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2019-04-08 23:18:48 -04:00
|
|
|
|
text := &html.Node{
|
2018-02-27 02:09:18 -05:00
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: content,
|
|
|
|
|
|
}
|
2019-04-08 23:18:48 -04:00
|
|
|
|
|
|
|
|
|
|
a.AppendChild(text)
|
|
|
|
|
|
return a
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2019-09-10 05:03:30 -04:00
|
|
|
|
func createCodeLink(href, content, class string) *html.Node {
|
2019-04-08 23:18:48 -04:00
|
|
|
|
a := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.A.String(),
|
|
|
|
|
|
Attr: []html.Attribute{{Key: "href", Val: href}},
|
|
|
|
|
|
}
|
2019-09-10 05:03:30 -04:00
|
|
|
|
|
|
|
|
|
|
if class != "" {
|
|
|
|
|
|
a.Attr = append(a.Attr, html.Attribute{Key: "class", Val: class})
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-06 07:19:43 -04:00
|
|
|
|
unescaped, err := url.QueryUnescape(content)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
unescaped = content
|
|
|
|
|
|
}
|
2019-04-08 23:18:48 -04:00
|
|
|
|
text := &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
2025-09-06 07:19:43 -04:00
|
|
|
|
Data: unescaped,
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2019-04-08 23:18:48 -04:00
|
|
|
|
|
|
|
|
|
|
code := &html.Node{
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: atom.Code.String(),
|
2019-06-18 18:31:31 -04:00
|
|
|
|
Attr: []html.Attribute{{Key: "class", Val: "nohighlight"}},
|
2019-04-08 23:18:48 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
code.AppendChild(text)
|
|
|
|
|
|
a.AppendChild(code)
|
|
|
|
|
|
return a
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
2019-10-13 18:29:10 -04:00
|
|
|
|
// replaceContent takes text node, and in its content it replaces a section of
// it with the specified newNode. Convenience wrapper around replaceContentList
// for a single replacement node.
func replaceContent(node *html.Node, i, j int, newNode *html.Node) {
	replaceContentList(node, i, j, []*html.Node{newNode})
}
|
|
|
|
|
|
|
|
|
|
|
|
// replaceContentList takes text node, and in its content it replaces a section of
|
|
|
|
|
|
// it with the specified newNodes. An example to visualize how this can work can
|
|
|
|
|
|
// be found here: https://play.golang.org/p/5zP8NnHZ03s
|
|
|
|
|
|
func replaceContentList(node *html.Node, i, j int, newNodes []*html.Node) {
|
2018-02-27 02:09:18 -05:00
|
|
|
|
// get the data before and after the match
|
|
|
|
|
|
before := node.Data[:i]
|
|
|
|
|
|
after := node.Data[j:]
|
|
|
|
|
|
|
|
|
|
|
|
// Replace in the current node the text, so that it is only what it is
|
|
|
|
|
|
// supposed to have.
|
|
|
|
|
|
node.Data = before
|
|
|
|
|
|
|
|
|
|
|
|
// Get the current next sibling, before which we place the replaced data,
|
|
|
|
|
|
// and after that we place the new text node.
|
|
|
|
|
|
nextSibling := node.NextSibling
|
2019-10-13 18:29:10 -04:00
|
|
|
|
for _, n := range newNodes {
|
|
|
|
|
|
node.Parent.InsertBefore(n, nextSibling)
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
if after != "" {
|
|
|
|
|
|
node.Parent.InsertBefore(&html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: after,
|
|
|
|
|
|
}, nextSibling)
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func mentionProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
start := 0
|
|
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next && start < len(node.Data) {
|
|
|
|
|
|
// We replace only the first mention; other mentions will be addressed later
|
|
|
|
|
|
found, loc := references.FindFirstMentionBytes([]byte(node.Data[start:]))
|
|
|
|
|
|
if !found {
|
|
|
|
|
|
return
|
2020-12-21 10:39:28 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
loc.Start += start
|
|
|
|
|
|
loc.End += start
|
|
|
|
|
|
mention := node.Data[loc.Start:loc.End]
|
|
|
|
|
|
var teams string
|
|
|
|
|
|
teams, ok := ctx.Metas["teams"]
|
|
|
|
|
|
// FIXME: util.URLJoin may not be necessary here:
|
|
|
|
|
|
// - setting.AppURL is defined to have a terminal '/' so unless mention[1:]
|
|
|
|
|
|
// is an AppSubURL link we can probably fallback to concatenation.
|
|
|
|
|
|
// team mention should follow @orgName/teamName style
|
|
|
|
|
|
if ok && strings.Contains(mention, "/") {
|
|
|
|
|
|
mentionOrgAndTeam := strings.Split(mention, "/")
|
|
|
|
|
|
if mentionOrgAndTeam[0][1:] == ctx.Metas["org"] && strings.Contains(teams, ","+strings.ToLower(mentionOrgAndTeam[1])+",") {
|
2024-03-13 06:34:58 -04:00
|
|
|
|
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(ctx.Links.Prefix(), "org", ctx.Metas["org"], "teams", mentionOrgAndTeam[1]), mention, "mention"))
|
2021-06-17 06:35:05 -04:00
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
start = 0
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
|
|
|
|
|
start = loc.End
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
2022-10-22 13:15:52 -04:00
|
|
|
|
mentionedUsername := mention[1:]
|
|
|
|
|
|
|
2023-05-20 17:02:52 -04:00
|
|
|
|
if DefaultProcessorHelper.IsUsernameMentionable != nil && DefaultProcessorHelper.IsUsernameMentionable(ctx.Ctx, mentionedUsername) {
|
2024-03-13 06:34:58 -04:00
|
|
|
|
replaceContent(node, loc.Start, loc.End, createLink(util.URLJoin(ctx.Links.Prefix(), mentionedUsername), mention, "mention"))
|
2022-10-22 13:15:52 -04:00
|
|
|
|
node = node.NextSibling.NextSibling
|
2024-04-30 06:51:30 -04:00
|
|
|
|
start = 0
|
2022-10-22 13:15:52 -04:00
|
|
|
|
} else {
|
2024-04-30 06:51:30 -04:00
|
|
|
|
start = loc.End
|
2022-10-22 13:15:52 -04:00
|
|
|
|
}
|
2019-11-24 11:34:44 -05:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func shortLinkProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
m := shortLinkPattern.FindStringSubmatchIndex(node.Data)
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
content := node.Data[m[2]:m[3]]
|
|
|
|
|
|
tail := node.Data[m[4]:m[5]]
|
|
|
|
|
|
props := make(map[string]string)
|
|
|
|
|
|
|
|
|
|
|
|
// MediaWiki uses [[link|text]], while GitHub uses [[text|link]]
|
|
|
|
|
|
// It makes page handling terrible, but we prefer GitHub syntax
|
|
|
|
|
|
// And fall back to MediaWiki only when it is obvious from the look
|
|
|
|
|
|
// Of text and link contents
|
|
|
|
|
|
sl := strings.Split(content, "|")
|
|
|
|
|
|
for _, v := range sl {
|
|
|
|
|
|
if equalPos := strings.IndexByte(v, '='); equalPos == -1 {
|
|
|
|
|
|
// There is no equal in this argument; this is a mandatory arg
|
|
|
|
|
|
if props["name"] == "" {
|
2024-01-15 03:49:24 -05:00
|
|
|
|
if IsLinkStr(v) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
// If we clearly see it is a link, we save it so
|
|
|
|
|
|
|
|
|
|
|
|
// But first we need to ensure, that if both mandatory args provided
|
|
|
|
|
|
// look like links, we stick to GitHub syntax
|
|
|
|
|
|
if props["link"] != "" {
|
|
|
|
|
|
props["name"] = props["link"]
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
props["link"] = strings.TrimSpace(v)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
props["name"] = v
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
} else {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
props["link"] = strings.TrimSpace(v)
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
} else {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
// There is an equal; optional argument.
|
|
|
|
|
|
|
|
|
|
|
|
sep := strings.IndexByte(v, '=')
|
|
|
|
|
|
key, val := v[:sep], html.UnescapeString(v[sep+1:])
|
|
|
|
|
|
|
|
|
|
|
|
// When parsing HTML, x/net/html will change all quotes which are
|
|
|
|
|
|
// not used for syntax into UTF-8 quotes. So checking val[0] won't
|
|
|
|
|
|
// be enough, since that only checks a single byte.
|
|
|
|
|
|
if len(val) > 1 {
|
|
|
|
|
|
if (strings.HasPrefix(val, "“") && strings.HasSuffix(val, "”")) ||
|
|
|
|
|
|
(strings.HasPrefix(val, "‘") && strings.HasSuffix(val, "’")) {
|
|
|
|
|
|
const lenQuote = len("‘")
|
|
|
|
|
|
val = val[lenQuote : len(val)-lenQuote]
|
|
|
|
|
|
} else if (strings.HasPrefix(val, "\"") && strings.HasSuffix(val, "\"")) ||
|
|
|
|
|
|
(strings.HasPrefix(val, "'") && strings.HasSuffix(val, "'")) {
|
|
|
|
|
|
val = val[1 : len(val)-1]
|
|
|
|
|
|
} else if strings.HasPrefix(val, "'") && strings.HasSuffix(val, "’") {
|
|
|
|
|
|
const lenQuote = len("‘")
|
|
|
|
|
|
val = val[1 : len(val)-lenQuote]
|
|
|
|
|
|
}
|
2020-12-03 21:01:42 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
props[key] = val
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
var name, link string
|
|
|
|
|
|
if props["link"] != "" {
|
|
|
|
|
|
link = props["link"]
|
|
|
|
|
|
} else if props["name"] != "" {
|
|
|
|
|
|
link = props["name"]
|
|
|
|
|
|
}
|
|
|
|
|
|
if props["title"] != "" {
|
|
|
|
|
|
name = props["title"]
|
|
|
|
|
|
} else if props["name"] != "" {
|
|
|
|
|
|
name = props["name"]
|
2018-03-05 13:39:12 -05:00
|
|
|
|
} else {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
name = link
|
2018-03-05 13:39:12 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
|
|
|
|
|
name += tail
|
|
|
|
|
|
image := false
|
|
|
|
|
|
switch ext := filepath.Ext(link); ext {
|
|
|
|
|
|
// fast path: empty string, ignore
|
|
|
|
|
|
case "":
|
|
|
|
|
|
// leave image as false
|
ci: detect and prevent empty `case` statements in Go code (#11593)
One of the security patches released 2026-03-09 [fixed a vulnerability](https://codeberg.org/forgejo/forgejo/pulls/11513/commits/d1c7b04d09f6a13896eaa1322ac690b2021539da) caused by a misapplication of Go `case` statements, where the implementation would have been correct if Go `case` statements automatically fall through to the next case block, but they do not. This PR adds a semgrep rule which detects any empty `case` statement and raises an error, in order to prevent this coding mistake in the future.
For example, code like this will now trigger a build error:
```go
switch setting.Protocol {
case setting.HTTPUnix:
case setting.FCGI:
case setting.FCGIUnix:
default:
defaultLocalURL := string(setting.Protocol) + "://"
}
```
Example error:
```
cmd/web.go
❯❯❱ semgrep.config.forgejo-switch-empty-case
switch has a case block with no content. This is treated as "break" by Go, but developers may
confuse it for "fallthrough". To fix this error, disambiguate by using "break" or
"fallthrough".
279┆ switch setting.Protocol {
280┆ case setting.HTTPUnix:
281┆ case setting.FCGI:
282┆ case setting.FCGIUnix:
283┆ default:
284┆ defaultLocalURL := string(setting.Protocol) + "://"
285┆ if setting.HTTPAddr == "0.0.0.0" {
286┆ defaultLocalURL += "localhost"
287┆ } else {
288┆ defaultLocalURL += setting.HTTPAddr
```
As described in the error output, this error can be fixed by explicitly listing `break` (the real Go behaviour, to do nothing in the block), or by listing `fallthrough` (if the intent was to fall through).
All existing code triggering this detection has been changed to `break` (or, rarely, irrelevant cases have been removed), which should maintain the same code functionality. While performing this fixup, a light analysis was performed on each case and they *appeared* correct, but with ~65 cases I haven't gone into extreme depth.
Tests are present for the semgrep rule in `.semgrep/tests/go.go`.
## Checklist
The [contributor guide](https://forgejo.org/docs/next/contributor/) contains information that will be helpful to first time contributors. There also are a few [conditions for merging Pull Requests in Forgejo repositories](https://codeberg.org/forgejo/governance/src/branch/main/PullRequestsAgreement.md). You are also welcome to join the [Forgejo development chatroom](https://matrix.to/#/#forgejo-development:matrix.org).
### Documentation
- [ ] I created a pull request [to the documentation](https://codeberg.org/forgejo/docs) to explain to Forgejo users how to use this change.
- [x] I did not document these changes and I do not expect someone else to do it.
### Release notes
- [ ] This change will be noticed by a Forgejo user or admin (feature, bug fix, performance, etc.). I suggest to include a release note for this change.
- [x] This change is not visible to a Forgejo user or admin (refactor, dependency upgrade, etc.). I think there is no need to add a release note for this change.
Reviewed-on: https://codeberg.org/forgejo/forgejo/pulls/11593
Reviewed-by: Gusted <gusted@noreply.codeberg.org>
Co-authored-by: Mathieu Fenniak <mathieu@fenniak.net>
Co-committed-by: Mathieu Fenniak <mathieu@fenniak.net>
2026-03-09 21:50:28 -04:00
|
|
|
|
break
|
2021-06-17 06:35:05 -04:00
|
|
|
|
case ".jpg", ".jpeg", ".png", ".tif", ".tiff", ".webp", ".gif", ".bmp", ".ico", ".svg":
|
|
|
|
|
|
image = true
|
2018-06-15 08:42:49 -04:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
|
|
|
|
|
childNode := &html.Node{}
|
|
|
|
|
|
linkNode := &html.Node{
|
|
|
|
|
|
FirstChild: childNode,
|
|
|
|
|
|
LastChild: childNode,
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: "a",
|
|
|
|
|
|
DataAtom: atom.A,
|
|
|
|
|
|
}
|
|
|
|
|
|
childNode.Parent = linkNode
|
2024-01-15 03:49:24 -05:00
|
|
|
|
absoluteLink := IsLinkStr(link)
|
2018-02-27 02:09:18 -05:00
|
|
|
|
if !absoluteLink {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if image {
|
|
|
|
|
|
link = strings.ReplaceAll(link, " ", "+")
|
|
|
|
|
|
} else {
|
|
|
|
|
|
link = strings.ReplaceAll(link, " ", "-")
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if !strings.Contains(link, "/") {
|
|
|
|
|
|
link = url.PathEscape(link)
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if image {
|
|
|
|
|
|
if !absoluteLink {
|
2024-01-15 03:49:24 -05:00
|
|
|
|
link = util.URLJoin(ctx.Links.ResolveMediaLink(ctx.IsWiki), link)
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
|
|
|
|
|
title := props["title"]
|
|
|
|
|
|
if title == "" {
|
|
|
|
|
|
title = props["alt"]
|
|
|
|
|
|
}
|
|
|
|
|
|
if title == "" {
|
|
|
|
|
|
title = path.Base(name)
|
|
|
|
|
|
}
|
|
|
|
|
|
alt := props["alt"]
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
// make the childNode an image - if we can, we also place the alt
|
|
|
|
|
|
childNode.Type = html.ElementNode
|
|
|
|
|
|
childNode.Data = "img"
|
|
|
|
|
|
childNode.DataAtom = atom.Img
|
|
|
|
|
|
childNode.Attr = []html.Attribute{
|
|
|
|
|
|
{Key: "src", Val: link},
|
|
|
|
|
|
{Key: "title", Val: title},
|
|
|
|
|
|
{Key: "alt", Val: alt},
|
|
|
|
|
|
}
|
|
|
|
|
|
} else {
|
|
|
|
|
|
if !absoluteLink {
|
|
|
|
|
|
if ctx.IsWiki {
|
2024-01-15 03:49:24 -05:00
|
|
|
|
link = util.URLJoin(ctx.Links.WikiLink(), link)
|
|
|
|
|
|
} else {
|
|
|
|
|
|
link = util.URLJoin(ctx.Links.SrcLink(), link)
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
childNode.Type = html.TextNode
|
|
|
|
|
|
childNode.Data = name
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2024-01-15 03:49:24 -05:00
|
|
|
|
linkNode.Attr = []html.Attribute{{Key: "href", Val: link}}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
replaceContent(node, m[0], m[1], linkNode)
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-30 23:00:41 -04:00
|
|
|
|
// pullReviewCommitPatternProcessor creates links to pull review commits.
// It scans the text node for matches of pullReviewCommitPattern and replaces
// each with an `<a class="commit">` node whose text has the form
// `!123 (commit <code>abcdef1234</code>)`.
func pullReviewCommitPatternProcessor(ctx *RenderContext, node *html.Node) {
	next := node.NextSibling
	for node != nil && node != next {
		m := pullReviewCommitPattern.FindStringSubmatchIndex(node.Data)
		if m == nil {
			return
		}

		// Submatch layout (by index pairs): whole URL, repo slug, PR id, SHA.
		urlFull := node.Data[m[0]:m[1]]
		repoSlug := node.Data[m[2]:m[3]]
		id := node.Data[m[4]:m[5]]
		sha := base.ShortSha(node.Data[m[6]:m[7]])

		// Create an `<a>` node with a text of
		// `!123 (commit <code>abcdef1234</code>)`
		aNode := &html.Node{
			Type: html.ElementNode,
			Data: atom.A.String(),
			Attr: []html.Attribute{{Key: "href", Val: urlFull}, {Key: "class", Val: "commit"}},
		}

		text := "!" + id + " (commit "

		// NOTE(review): presumably prefixes text with the repo slug and/or
		// instance path when the link targets another repo or instance —
		// confirm against the helper's definition elsewhere in this package.
		optionalRepoSlugAndInstancePath(ctx, &text, urlFull, repoSlug)

		aNode.AppendChild(&html.Node{
			Type: html.TextNode,
			Data: text,
		})

		textNode := &html.Node{
			Type: html.TextNode,
			Data: sha,
		}

		codeNode := &html.Node{
			Type: html.ElementNode,
			Data: atom.Code.String(),
			Attr: []html.Attribute{{Key: "class", Val: "nohighlight"}},
		}

		// Assemble `<code>sha</code>` inside the anchor, then close the
		// parenthesis with a trailing text node.
		codeNode.AppendChild(textNode)
		aNode.AppendChild(codeNode)

		aNode.AppendChild(&html.Node{
			Type: html.TextNode,
			Data: ")",
		})

		replaceContent(node, m[0], m[1], aNode)
		// replaceContent splits the node; continue scanning from the text
		// node that follows the inserted anchor.
		node = node.NextSibling.NextSibling
	}
}
|
|
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func fullIssuePatternProcessor(ctx *RenderContext, node *html.Node) {
|
|
|
|
|
|
if ctx.Metas == nil {
|
2019-04-12 01:53:34 -04:00
|
|
|
|
return
|
|
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
2024-08-22 16:35:04 -04:00
|
|
|
|
re := getIssueFullPattern()
|
|
|
|
|
|
linkIndex, m := re.FindStringIndex(node.Data), re.FindStringSubmatch(node.Data)
|
|
|
|
|
|
if linkIndex == nil || m == nil {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
return
|
|
|
|
|
|
}
|
Append `(comment)` when a link points at a comment rather than the whole issue (#23734)
Close #23671
For the feature mentioned above, this PR append ' (comment)' to the
rendered html if it is a hashcomment.
After the PR, type in the following
```
pull request from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2
pull request from this repo:
http://localhost:3000/aaa/testA/pulls/2
issue comment from this repo:
http://localhost:3000/aaa/testA/issues/1#issuecomment-18
http://localhost:3000/aaa/testA/pulls/2#issue-9
issue comment from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issuecomment-24
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issue
```
Gives:
<img width="687" alt="截屏2023-03-27 13 53 06"
src="https://user-images.githubusercontent.com/17645053/227852387-2b218e0d-3468-4d90-ad81-d702ddd17fd2.png">
Other than the above feature, this PR also includes two other changes:
1 Right now, the render of links from file changed tab in pull request
might not be very proper, for example, if type in the following. (not
sure if this is an issue or design, if not an issue, I will revert the
changes). example on
[try.gitea.io](https://try.gitea.io/HesterG/testrepo/pulls/1)
```
https://try.gitea.io/HesterG/testrepo/pulls/1/files#issuecomment-162725
https://try.gitea.io/HesterG/testrepo/pulls/1/files
```
it will render the following
<img width="899" alt="截屏2023-03-24 15 41 37"
src="https://user-images.githubusercontent.com/17645053/227456117-5eccedb7-9118-4540-929d-aee9a76de852.png">
In this PR, skip processing the link into a ref issue if it is a link
from files changed tab in pull request
After:
type in following
```
hash comment on files changed tab:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files#issuecomment-24
files changed link:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files
```
Gives
<img width="708" alt="截屏2023-03-27 22 09 02"
src="https://user-images.githubusercontent.com/17645053/227964273-5dc06c50-3713-489c-b05d-d95367d0ab0f.png">
2 Right now, after editing the comment area, there will not be tippys
attached to `ref-issue`; and no tippy attached on preview as well.
example:
https://user-images.githubusercontent.com/17645053/227850540-5ae34e2d-b1d7-4d0d-9726-7701bf825d1f.mov
In this PR, in frontend, make sure tippy is added after editing the
comment, and to the comment on preview tab
After:
https://user-images.githubusercontent.com/17645053/227853777-06f56b4c-1148-467c-b6f7-f79418e67504.mov
2023-04-03 04:02:57 -04:00
|
|
|
|
|
2024-08-22 16:35:04 -04:00
|
|
|
|
link := node.Data[linkIndex[0]:linkIndex[1]]
|
|
|
|
|
|
text := "#" + m[re.SubexpIndex("num")] + m[re.SubexpIndex("subpath")]
|
Append `(comment)` when a link points at a comment rather than the whole issue (#23734)
Close #23671
For the feature mentioned above, this PR append ' (comment)' to the
rendered html if it is a hashcomment.
After the PR, type in the following
```
pull request from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2
pull request from this repo:
http://localhost:3000/aaa/testA/pulls/2
issue comment from this repo:
http://localhost:3000/aaa/testA/issues/1#issuecomment-18
http://localhost:3000/aaa/testA/pulls/2#issue-9
issue comment from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issuecomment-24
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issue
```
Gives:
<img width="687" alt="截屏2023-03-27 13 53 06"
src="https://user-images.githubusercontent.com/17645053/227852387-2b218e0d-3468-4d90-ad81-d702ddd17fd2.png">
Other than the above feature, this PR also includes two other changes:
1 Right now, the render of links from file changed tab in pull request
might not be very proper, for example, if type in the following. (not
sure if this is an issue or design, if not an issue, I will revert the
changes). example on
[try.gitea.io](https://try.gitea.io/HesterG/testrepo/pulls/1)
```
https://try.gitea.io/HesterG/testrepo/pulls/1/files#issuecomment-162725
https://try.gitea.io/HesterG/testrepo/pulls/1/files
```
it will render the following
<img width="899" alt="截屏2023-03-24 15 41 37"
src="https://user-images.githubusercontent.com/17645053/227456117-5eccedb7-9118-4540-929d-aee9a76de852.png">
In this PR, skip processing the link into a ref issue if it is a link
from files changed tab in pull request
After:
type in following
```
hash comment on files changed tab:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files#issuecomment-24
files changed link:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files
```
Gives
<img width="708" alt="截屏2023-03-27 22 09 02"
src="https://user-images.githubusercontent.com/17645053/227964273-5dc06c50-3713-489c-b05d-d95367d0ab0f.png">
2 Right now, after editing the comment area, there will not be tippys
attached to `ref-issue`; and no tippy attached on preview as well.
example:
https://user-images.githubusercontent.com/17645053/227850540-5ae34e2d-b1d7-4d0d-9726-7701bf825d1f.mov
In this PR, in frontend, make sure tippy is added after editing the
comment, and to the comment on preview tab
After:
https://user-images.githubusercontent.com/17645053/227853777-06f56b4c-1148-467c-b6f7-f79418e67504.mov
2023-04-03 04:02:57 -04:00
|
|
|
|
|
2024-08-22 16:35:04 -04:00
|
|
|
|
if len(m[re.SubexpIndex("comment")]) > 0 {
|
Append `(comment)` when a link points at a comment rather than the whole issue (#23734)
Close #23671
For the feature mentioned above, this PR append ' (comment)' to the
rendered html if it is a hashcomment.
After the PR, type in the following
```
pull request from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2
pull request from this repo:
http://localhost:3000/aaa/testA/pulls/2
issue comment from this repo:
http://localhost:3000/aaa/testA/issues/1#issuecomment-18
http://localhost:3000/aaa/testA/pulls/2#issue-9
issue comment from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issuecomment-24
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issue
```
Gives:
<img width="687" alt="截屏2023-03-27 13 53 06"
src="https://user-images.githubusercontent.com/17645053/227852387-2b218e0d-3468-4d90-ad81-d702ddd17fd2.png">
Other than the above feature, this PR also includes two other changes:
1 Right now, the render of links from file changed tab in pull request
might not be very proper, for example, if type in the following. (not
sure if this is an issue or design, if not an issue, I will revert the
changes). example on
[try.gitea.io](https://try.gitea.io/HesterG/testrepo/pulls/1)
```
https://try.gitea.io/HesterG/testrepo/pulls/1/files#issuecomment-162725
https://try.gitea.io/HesterG/testrepo/pulls/1/files
```
it will render the following
<img width="899" alt="截屏2023-03-24 15 41 37"
src="https://user-images.githubusercontent.com/17645053/227456117-5eccedb7-9118-4540-929d-aee9a76de852.png">
In this PR, skip processing the link into a ref issue if it is a link
from files changed tab in pull request
After:
type in following
```
hash comment on files changed tab:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files#issuecomment-24
files changed link:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files
```
Gives
<img width="708" alt="截屏2023-03-27 22 09 02"
src="https://user-images.githubusercontent.com/17645053/227964273-5dc06c50-3713-489c-b05d-d95367d0ab0f.png">
2 Right now, after editing the comment area, there will not be tippys
attached to `ref-issue`; and no tippy attached on preview as well.
example:
https://user-images.githubusercontent.com/17645053/227850540-5ae34e2d-b1d7-4d0d-9726-7701bf825d1f.mov
In this PR, in frontend, make sure tippy is added after editing the
comment, and to the comment on preview tab
After:
https://user-images.githubusercontent.com/17645053/227853777-06f56b4c-1148-467c-b6f7-f79418e67504.mov
2023-04-03 04:02:57 -04:00
|
|
|
|
if locale, ok := ctx.Ctx.Value(translation.ContextKey).(translation.Locale); ok {
|
2024-02-14 16:48:45 -05:00
|
|
|
|
text += " " + locale.TrString("repo.from_comment")
|
Append `(comment)` when a link points at a comment rather than the whole issue (#23734)
Close #23671
For the feature mentioned above, this PR append ' (comment)' to the
rendered html if it is a hashcomment.
After the PR, type in the following
```
pull request from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2
pull request from this repo:
http://localhost:3000/aaa/testA/pulls/2
issue comment from this repo:
http://localhost:3000/aaa/testA/issues/1#issuecomment-18
http://localhost:3000/aaa/testA/pulls/2#issue-9
issue comment from other repo:
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issuecomment-24
http://localhost:3000/testOrg/testOrgRepo/pulls/2#issue
```
Gives:
<img width="687" alt="截屏2023-03-27 13 53 06"
src="https://user-images.githubusercontent.com/17645053/227852387-2b218e0d-3468-4d90-ad81-d702ddd17fd2.png">
Other than the above feature, this PR also includes two other changes:
1 Right now, the render of links from file changed tab in pull request
might not be very proper, for example, if type in the following. (not
sure if this is an issue or design, if not an issue, I will revert the
changes). example on
[try.gitea.io](https://try.gitea.io/HesterG/testrepo/pulls/1)
```
https://try.gitea.io/HesterG/testrepo/pulls/1/files#issuecomment-162725
https://try.gitea.io/HesterG/testrepo/pulls/1/files
```
it will render the following
<img width="899" alt="截屏2023-03-24 15 41 37"
src="https://user-images.githubusercontent.com/17645053/227456117-5eccedb7-9118-4540-929d-aee9a76de852.png">
In this PR, skip processing the link into a ref issue if it is a link
from files changed tab in pull request
After:
type in following
```
hash comment on files changed tab:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files#issuecomment-24
files changed link:
http://localhost:3000/testOrg/testOrgRepo/pulls/2/files
```
Gives
<img width="708" alt="截屏2023-03-27 22 09 02"
src="https://user-images.githubusercontent.com/17645053/227964273-5dc06c50-3713-489c-b05d-d95367d0ab0f.png">
2 Right now, after editing the comment area, there will not be tippys
attached to `ref-issue`; and no tippy attached on preview as well.
example:
https://user-images.githubusercontent.com/17645053/227850540-5ae34e2d-b1d7-4d0d-9726-7701bf825d1f.mov
In this PR, in frontend, make sure tippy is added after editing the
comment, and to the comment on preview tab
After:
https://user-images.githubusercontent.com/17645053/227853777-06f56b4c-1148-467c-b6f7-f79418e67504.mov
2023-04-03 04:02:57 -04:00
|
|
|
|
} else {
|
|
|
|
|
|
text += " (comment)"
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
2024-08-22 16:35:04 -04:00
|
|
|
|
matchUser := m[re.SubexpIndex("user")]
|
|
|
|
|
|
matchRepo := m[re.SubexpIndex("repo")]
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
2024-08-22 16:35:04 -04:00
|
|
|
|
if matchUser == ctx.Metas["user"] && matchRepo == ctx.Metas["repo"] {
|
|
|
|
|
|
replaceContent(node, linkIndex[0], linkIndex[1], createLink(link, text, "ref-issue"))
|
2021-06-17 06:35:05 -04:00
|
|
|
|
} else {
|
2024-08-22 16:35:04 -04:00
|
|
|
|
text = matchUser + "/" + matchRepo + text
|
|
|
|
|
|
replaceContent(node, linkIndex[0], linkIndex[1], createLink(link, text, "ref-issue"))
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
2019-04-12 01:53:34 -04:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func issueIndexPatternProcessor(ctx *RenderContext, node *html.Node) {
|
2024-01-03 01:01:12 -05:00
|
|
|
|
if ctx.Metas == nil {
|
2019-04-12 01:53:34 -04:00
|
|
|
|
return
|
|
|
|
|
|
}
|
2024-01-03 01:01:12 -05:00
|
|
|
|
|
|
|
|
|
|
// FIXME: the use of "mode" is quite dirty and hacky, for example: what is a "document"? how should it be rendered?
|
|
|
|
|
|
// The "mode" approach should be refactored to some other more clear&reliable way.
|
|
|
|
|
|
crossLinkOnly := (ctx.Metas["mode"] == "document" && !ctx.IsWiki)
|
|
|
|
|
|
|
2019-10-13 18:29:10 -04:00
|
|
|
|
var (
|
|
|
|
|
|
found bool
|
|
|
|
|
|
ref *references.RenderizableReference
|
|
|
|
|
|
)
|
|
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
2022-06-10 01:39:53 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
for node != nil && node != next {
|
2022-06-10 01:39:53 -04:00
|
|
|
|
_, hasExtTrackFormat := ctx.Metas["format"]
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
|
|
|
|
|
// Repos with external issue trackers might still need to reference local PRs
|
|
|
|
|
|
// We need to concern with the first one that shows up in the text, whichever it is
|
2022-06-10 01:39:53 -04:00
|
|
|
|
isNumericStyle := ctx.Metas["style"] == "" || ctx.Metas["style"] == IssueNameStyleNumeric
|
2024-01-03 01:01:12 -05:00
|
|
|
|
foundNumeric, refNumeric := references.FindRenderizableReferenceNumeric(node.Data, hasExtTrackFormat && !isNumericStyle, crossLinkOnly)
|
2022-06-10 01:39:53 -04:00
|
|
|
|
|
|
|
|
|
|
switch ctx.Metas["style"] {
|
|
|
|
|
|
case "", IssueNameStyleNumeric:
|
|
|
|
|
|
found, ref = foundNumeric, refNumeric
|
|
|
|
|
|
case IssueNameStyleAlphanumeric:
|
|
|
|
|
|
found, ref = references.FindRenderizableReferenceAlphanumeric(node.Data)
|
|
|
|
|
|
case IssueNameStyleRegexp:
|
|
|
|
|
|
pattern, err := regexplru.GetCompiled(ctx.Metas["regexp"])
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
found, ref = references.FindRenderizableReferenceRegexp(node.Data, pattern)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
// Repos with external issue trackers might still need to reference local PRs
|
|
|
|
|
|
// We need to concern with the first one that shows up in the text, whichever it is
|
2022-07-07 11:46:49 -04:00
|
|
|
|
if hasExtTrackFormat && !isNumericStyle && refNumeric != nil {
|
2022-06-10 01:39:53 -04:00
|
|
|
|
// If numeric (PR) was found, and it was BEFORE the non-numeric pattern, use that
|
2022-07-07 11:46:49 -04:00
|
|
|
|
// Allow a free-pass when non-numeric pattern wasn't found.
|
|
|
|
|
|
if found && (ref == nil || refNumeric.RefLocation.Start < ref.RefLocation.Start) {
|
2022-06-10 01:39:53 -04:00
|
|
|
|
found = foundNumeric
|
|
|
|
|
|
ref = refNumeric
|
2019-12-01 08:57:05 -05:00
|
|
|
|
}
|
|
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if !found {
|
|
|
|
|
|
return
|
2019-12-01 08:57:05 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
|
|
|
|
|
|
var link *html.Node
|
|
|
|
|
|
reftext := node.Data[ref.RefLocation.Start:ref.RefLocation.End]
|
2024-08-06 21:19:12 -04:00
|
|
|
|
if hasExtTrackFormat && !ref.IsPull && ref.Owner == "" {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
ctx.Metas["index"] = ref.Issue
|
2022-04-01 04:47:50 -04:00
|
|
|
|
|
|
|
|
|
|
res, err := vars.Expand(ctx.Metas["format"], ctx.Metas)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
// here we could just log the error and continue the rendering
|
|
|
|
|
|
log.Error("unable to expand template vars for ref %s, err: %v", ref.Issue, err)
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
link = createLink(res, reftext, "ref-issue ref-external-issue")
|
2019-12-01 08:57:05 -05:00
|
|
|
|
} else {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
// Path determines the type of link that will be rendered. It's unknown at this point whether
|
|
|
|
|
|
// the linked item is actually a PR or an issue. Luckily it's of no real consequence because
|
2024-04-21 12:26:15 -04:00
|
|
|
|
// Forgejo will redirect on click as appropriate.
|
2021-06-17 06:35:05 -04:00
|
|
|
|
path := "issues"
|
|
|
|
|
|
if ref.IsPull {
|
|
|
|
|
|
path = "pulls"
|
|
|
|
|
|
}
|
|
|
|
|
|
if ref.Owner == "" {
|
2024-03-13 06:34:58 -04:00
|
|
|
|
link = createLink(util.URLJoin(ctx.Links.Prefix(), ctx.Metas["user"], ctx.Metas["repo"], path, ref.Issue), reftext, "ref-issue")
|
2021-06-17 06:35:05 -04:00
|
|
|
|
} else {
|
2024-03-13 06:34:58 -04:00
|
|
|
|
link = createLink(util.URLJoin(ctx.Links.Prefix(), ref.Owner, ref.Name, path, ref.Issue), reftext, "ref-issue")
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
2019-12-01 08:57:05 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if ref.Action == references.XRefActionNone {
|
|
|
|
|
|
replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link)
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
continue
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
// Decorate action keywords if actionable
|
|
|
|
|
|
var keyword *html.Node
|
2022-06-10 01:39:53 -04:00
|
|
|
|
if references.IsXrefActionable(ref, hasExtTrackFormat) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
keyword = createKeyword(node.Data[ref.ActionLocation.Start:ref.ActionLocation.End])
|
|
|
|
|
|
} else {
|
|
|
|
|
|
keyword = &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: node.Data[ref.ActionLocation.Start:ref.ActionLocation.End],
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
spaces := &html.Node{
|
2019-11-18 08:13:07 -05:00
|
|
|
|
Type: html.TextNode,
|
2021-06-17 06:35:05 -04:00
|
|
|
|
Data: node.Data[ref.ActionLocation.End:ref.RefLocation.Start],
|
2019-11-18 08:13:07 -05:00
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
replaceContentList(node, ref.ActionLocation.Start, ref.RefLocation.End, []*html.Node{keyword, spaces, link})
|
|
|
|
|
|
node = node.NextSibling.NextSibling.NextSibling.NextSibling
|
2019-11-18 08:13:07 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
|
|
|
|
|
|
2023-01-29 20:50:01 -05:00
|
|
|
|
func commitCrossReferencePatternProcessor(ctx *RenderContext, node *html.Node) {
|
|
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
found, ref := references.FindRenderizableCommitCrossReference(node.Data)
|
|
|
|
|
|
if !found {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
reftext := ref.Owner + "/" + ref.Name + "@" + base.ShortSha(ref.CommitSha)
|
2024-03-13 06:34:58 -04:00
|
|
|
|
link := createLink(util.URLJoin(ctx.Links.Prefix(), ref.Owner, ref.Name, "commit", ref.CommitSha), reftext, "commit")
|
2023-01-29 20:50:01 -05:00
|
|
|
|
|
|
|
|
|
|
replaceContent(node, ref.RefLocation.Start, ref.RefLocation.End, link)
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-09-30 23:00:41 -04:00
|
|
|
|
// fullHashPatternProcessor renders URLs that contain a SHA
|
2024-01-19 11:05:02 -05:00
|
|
|
|
func fullHashPatternProcessor(ctx *RenderContext, node *html.Node) {
|
2021-04-19 18:25:08 -04:00
|
|
|
|
if ctx.Metas == nil {
|
2019-04-16 03:53:57 -04:00
|
|
|
|
return
|
|
|
|
|
|
}
|
2019-04-06 14:28:45 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
2024-01-19 11:05:02 -05:00
|
|
|
|
m := anyHashPattern.FindStringSubmatchIndex(node.Data)
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
urlFull := node.Data[m[0]:m[1]]
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2025-09-30 23:00:41 -04:00
|
|
|
|
// In most cases, the URL will look like this:
|
|
|
|
|
|
// `https://domain.tld/<owner>/<repo>/<path>/<sha>`.
|
|
|
|
|
|
// The amount of components in `<path>` is variable, but that alone is doable with regexp.
|
|
|
|
|
|
//
|
|
|
|
|
|
// However, Forgejo also allows being hosted on a sub path, i.e.
|
|
|
|
|
|
// `https://domain.tld/<sub>/<owner>/<repo>/<path>/<sha>`.
|
|
|
|
|
|
// And this sub path can also have any amount of components. But fishing out a section
|
|
|
|
|
|
// between two variable length matches is not something regular grammars are capable of.
|
|
|
|
|
|
//
|
|
|
|
|
|
// Instead, the regexp extracts the entire path section before the SHA
|
|
|
|
|
|
// (i.e. `<sub>/<owner>/<repo>/<path>`), and we find the components we need by counting.
|
|
|
|
|
|
// `<sub>` is unknown, but the possible values for `<path>` are defined by us
|
|
|
|
|
|
// (see `router/web/web.go`). So we count from the back.
|
|
|
|
|
|
subPath := node.Data[m[2]:m[3]]
|
|
|
|
|
|
|
|
|
|
|
|
components := strings.Split(subPath, "/")
|
|
|
|
|
|
componentCount := len(components)
|
|
|
|
|
|
|
|
|
|
|
|
// In most cases, the `<owner>` component is right at the start of the path.
|
|
|
|
|
|
ownerIndex := 0
|
|
|
|
|
|
|
|
|
|
|
|
// But if there are more than three components, this could be `<sub>` or an app route
|
|
|
|
|
|
// with two components. Or both.
|
|
|
|
|
|
if componentCount > 3 {
|
|
|
|
|
|
// As mentioned, we count from the back. We decrement for the `<repo>` component, and the one
|
|
|
|
|
|
// component from the app route that's guaranteed to be there.
|
|
|
|
|
|
// We also adjust this to be an array index, so we subtract one more.
|
|
|
|
|
|
ownerIndex = componentCount - 3
|
|
|
|
|
|
|
|
|
|
|
|
// We then check for known app routes that use two components.
|
|
|
|
|
|
// Currently, this checks for:
|
|
|
|
|
|
// - `src/commit`
|
|
|
|
|
|
// - `commits/commit`
|
|
|
|
|
|
//
|
|
|
|
|
|
// This does have one scenario where we cannot figure things out reliably:
|
|
|
|
|
|
// If there is a sub path, and the repository is named like one of the known app routes
|
|
|
|
|
|
// (e.g. `src`), we cannot distinguish between the repo and the app route.
|
|
|
|
|
|
// We assume that naming a repository like that is uncommon, and prioritize the case where its
|
|
|
|
|
|
// part of the app route.
|
|
|
|
|
|
if components[componentCount-1] == "commit" &&
|
|
|
|
|
|
(components[componentCount-2] == "src" || components[componentCount-2] == "commits") {
|
|
|
|
|
|
ownerIndex--
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
repoSlug := components[ownerIndex] + "/" + components[ownerIndex+1]
|
|
|
|
|
|
|
|
|
|
|
|
text := base.ShortSha(node.Data[m[4]:m[5]])
|
|
|
|
|
|
|
|
|
|
|
|
// We need to figure out the base of the provided URL, which is up to and including the
|
|
|
|
|
|
// `<owner>/<repo>` slug.
|
|
|
|
|
|
// With that we can determine if it matches the current repo, or if the slug should be shown.
|
2025-12-28 11:18:51 -05:00
|
|
|
|
optionalRepoSlugAndInstancePath(ctx, &text, urlFull, repoSlug)
|
2025-09-30 23:00:41 -04:00
|
|
|
|
|
|
|
|
|
|
// 3rd capture group matches an optional file path after the SHA
|
|
|
|
|
|
filePath := ""
|
|
|
|
|
|
if m[7] > 0 {
|
|
|
|
|
|
filePath = node.Data[m[6]:m[7]]
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
2019-04-06 14:28:45 -04:00
|
|
|
|
|
2024-05-01 06:36:09 -04:00
|
|
|
|
// 5th capture group matches a optional url hash
|
2021-06-17 06:35:05 -04:00
|
|
|
|
hash := ""
|
2025-09-30 23:00:41 -04:00
|
|
|
|
if m[11] > 0 {
|
|
|
|
|
|
hash = node.Data[m[10]:m[11]][1:]
|
|
|
|
|
|
|
|
|
|
|
|
// Truncate long diff IDs
|
|
|
|
|
|
if len(hash) > 15 && strings.HasPrefix(hash, "diff-") {
|
|
|
|
|
|
hash = hash[:15]
|
|
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
2019-04-06 14:28:45 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
start := m[0]
|
|
|
|
|
|
end := m[1]
|
|
|
|
|
|
|
2025-09-30 23:00:41 -04:00
|
|
|
|
// If the URL ends in '.', it's very likely that it is not part of the
|
|
|
|
|
|
// actual URL but used to finish a sentence.
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if strings.HasSuffix(urlFull, ".") {
|
|
|
|
|
|
end--
|
|
|
|
|
|
urlFull = urlFull[:len(urlFull)-1]
|
|
|
|
|
|
if hash != "" {
|
|
|
|
|
|
hash = hash[:len(hash)-1]
|
2025-09-30 23:00:41 -04:00
|
|
|
|
} else if filePath != "" {
|
|
|
|
|
|
filePath = filePath[:len(filePath)-1]
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
2019-04-06 14:28:45 -04:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2025-09-30 23:00:41 -04:00
|
|
|
|
if filePath != "" {
|
|
|
|
|
|
decoded, err := url.QueryUnescape(filePath)
|
|
|
|
|
|
if err != nil {
|
|
|
|
|
|
text += decoded
|
|
|
|
|
|
} else {
|
|
|
|
|
|
text += filePath
|
|
|
|
|
|
}
|
2021-06-17 06:35:05 -04:00
|
|
|
|
}
|
2019-04-06 14:28:45 -04:00
|
|
|
|
|
2021-06-17 06:35:05 -04:00
|
|
|
|
if hash != "" {
|
|
|
|
|
|
text += " (" + hash + ")"
|
|
|
|
|
|
}
|
|
|
|
|
|
replaceContent(node, start, end, createCodeLink(urlFull, text, "commit"))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2021-12-11 12:21:36 -05:00
|
|
|
|
// comparePatternProcessor renders compare URLs (`.../compare/<sha1>...<sha2>`)
// found in text nodes as short code links, e.g. `<short1>...<short2>`, with an
// optional `files` query value and/or URL fragment appended in parentheses.
func comparePatternProcessor(ctx *RenderContext, node *html.Node) {
	// Without repository metas we cannot resolve the current repo context.
	if ctx.Metas == nil {
		return
	}

	next := node.NextSibling
	for node != nil && node != next {
		m := comparePattern.FindStringSubmatchIndex(node.Data)
		if m == nil {
			return
		}

		// Ensure that every group (m[0]...m[9]) has a match
		for i := 0; i < 10; i++ {
			if m[i] == -1 {
				return
			}
		}

		// Capture groups: full URL, repo slug, first SHA, the dots separator,
		// second SHA; optional query string and URL fragment follow.
		urlFull := node.Data[m[0]:m[1]]
		repoSlug := node.Data[m[2]:m[3]]
		text1 := base.ShortSha(node.Data[m[4]:m[5]])
		textDots := base.ShortSha(node.Data[m[6]:m[7]])
		text2 := base.ShortSha(node.Data[m[8]:m[9]])

		// Optional query string; `[1:]` strips the leading '?'.
		query := ""
		if m[11] > 0 {
			query = node.Data[m[10]:m[11]][1:]
		}

		// Optional URL fragment; `[1:]` strips the leading '#'.
		hash := ""
		if m[13] > 0 {
			hash = node.Data[m[12]:m[13]][1:]
		}

		start := m[0]
		end := m[1]

		// If the URL ends in '.', it's very likely that it is not part of the
		// actual URL but used to finish a sentence.
		if strings.HasSuffix(urlFull, ".") {
			end--
			urlFull = urlFull[:len(urlFull)-1]
			// Only the last present component carries the trailing dot, so
			// trim exactly one of them, in order of appearance from the end.
			if hash != "" {
				hash = hash[:len(hash)-1]
			} else if query != "" {
				query = query[:len(query)-1]
			} else if text2 != "" {
				text2 = text2[:len(text2)-1]
			}
		}

		text := text1 + textDots + text2

		// Prepend the repo slug (and instance) when the link points outside
		// the current repository/instance.
		optionalRepoSlugAndInstancePath(ctx, &text, urlFull, repoSlug)

		// Surface a `files=` query value, if present, in the link label.
		extra := ""
		if query != "" {
			query, err := url.ParseQuery(query)
			if err == nil && query.Has("files") {
				extra = query.Get("files")
			}
		}

		if hash != "" {
			if extra != "" {
				extra += "#"
			}

			extra += hash
		}

		if extra != "" {
			text += " (" + extra + ")"
		}
		replaceContent(node, start, end, createCodeLink(urlFull, text, "compare"))
		// Skip over the inserted link element to the trailing text node.
		node = node.NextSibling.NextSibling
	}
}
|
|
|
|
|
|
|
2024-03-15 08:44:42 -04:00
|
|
|
|
// filePreviewPatternProcessor replaces links to repository file lines with an
// inline rendered preview of those lines. The preview is a block-level node,
// so it must be inserted as a sibling of (or next to) the enclosing inline
// element rather than via replaceContent, to keep the HTML structure valid.
func filePreviewPatternProcessor(ctx *RenderContext, node *html.Node) {
	// File previews need repository context to resolve the linked blob.
	if ctx.Metas == nil || ctx.Metas["user"] == "" || ctx.Metas["repo"] == "" {
		return
	}
	// Without a blob fetcher there is nothing to preview.
	if DefaultProcessorHelper.GetRepoFileBlob == nil {
		return
	}

	// Resolve the locale from the request context, defaulting to en-US.
	locale := translation.NewLocale("en-US")
	if ctx.Ctx != nil {
		ctxLocale, ok := ctx.Ctx.Value(translation.ContextKey).(translation.Locale)
		if ok {
			locale = ctxLocale
		}
	}

	next := node.NextSibling
	for node != nil && node != next {
		// Previews are only inserted relative to an element parent.
		if node.Parent == nil || node.Parent.Type != html.ElementNode {
			node = node.NextSibling
			continue
		}
		previews := NewFilePreviews(ctx, node, locale)
		if previews == nil {
			node = node.NextSibling
			continue
		}
		// offset tracks how much of the original node.Data has already been
		// consumed by earlier previews (preview.start/end index the original).
		offset := 0
		for _, preview := range previews {
			previewNode := preview.CreateHTML(locale)

			// Specialized version of replaceContent, so the parent paragraph element is not destroyed from our div
			before := node.Data[:(preview.start - offset)]
			after := node.Data[(preview.end - offset):]
			afterTextNode := &html.Node{
				Type: html.TextNode,
				Data: after,
			}
			matched := true
			switch node.Parent.Data {
			case "div", "li", "td", "th", "details":
				// Block-ish parents can hold the preview directly: insert the
				// preview and the remaining text as siblings of this text node.
				nextSibling := node.NextSibling
				node.Parent.InsertBefore(previewNode, nextSibling)
				node.Parent.InsertBefore(afterTextNode, nextSibling)
			case "p", "span", "em", "strong":
				// Inline parents cannot contain the block preview: insert the
				// preview after the parent, then clone the parent element to
				// carry the trailing text (and any later siblings) so the
				// original formatting continues after the preview.
				nextParentSibling := node.Parent.NextSibling
				node.Parent.Parent.InsertBefore(previewNode, nextParentSibling)
				afterNode := &html.Node{
					Type: html.ElementNode,
					Data: node.Parent.Data,
					Attr: node.Parent.Attr,
				}
				afterNode.AppendChild(afterTextNode)
				node.Parent.Parent.InsertBefore(afterNode, nextParentSibling)
				// Move all remaining siblings of the link into the cloned
				// trailing element, preserving their order.
				for sibling := node.NextSibling; sibling != nil; sibling = node.NextSibling {
					sibling.Parent.RemoveChild(sibling)
					afterNode.AppendChild(sibling)
				}
			default:
				// Disallowed environments (headings, <del>, <summary>, ...):
				// leave the link untouched.
				matched = false
			}
			if matched {
				offset = preview.end
				// Trim this text node to the part before the preview and
				// continue processing inside the trailing text node.
				node.Data = before
				node = afterTextNode
			}
		}
		node = node.NextSibling
	}
}
|
|
|
|
|
|
|
2024-07-16 19:37:20 -04:00
|
|
|
|
func inlineCodeBlockProcessor(ctx *RenderContext, node *html.Node) {
|
|
|
|
|
|
start := 0
|
|
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next && start < len(node.Data) {
|
|
|
|
|
|
m := InlineCodeBlockRegex.FindStringSubmatchIndex(node.Data[start:])
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
code := node.Data[m[0]+1 : m[1]-1]
|
|
|
|
|
|
replaceContent(node, m[0], m[1], createInlineCode(code))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2020-04-28 14:05:39 -04:00
|
|
|
|
// emojiShortCodeProcessor for rendering text like :smile: into emoji
func emojiShortCodeProcessor(ctx *RenderContext, node *html.Node) {
	// start is the scan offset within the current text node; it advances past
	// non-emoji matches and resets to 0 after a replacement splits the node.
	start := 0
	next := node.NextSibling
	for node != nil && node != next && start < len(node.Data) {
		m := EmojiShortCodeRegex.FindStringSubmatchIndex(node.Data[start:])
		if m == nil {
			return
		}
		// Translate match offsets back into node.Data coordinates.
		m[0] += start
		m[1] += start

		start = m[1]

		// Strip the surrounding colons from e.g. `:smile:` to get the alias.
		alias := node.Data[m[0]:m[1]]
		alias = strings.ReplaceAll(alias, ":", "")
		converted := emoji.FromAlias(alias)
		if converted == nil {
			// check if this is a custom reaction
			if setting.UI.CustomEmojisLookup.Contains(alias) {
				replaceContent(node, m[0], m[1], createCustomEmoji(alias))
				// Skip the inserted element and rescan the trailing text node.
				node = node.NextSibling.NextSibling
				start = 0
				continue
			}
			// Unknown alias: leave the text as-is and keep scanning after it.
			continue
		}

		replaceContent(node, m[0], m[1], createEmoji(converted.Emoji, "emoji", converted.Description, alias))
		// Skip the inserted element and rescan the trailing text node.
		node = node.NextSibling.NextSibling
		start = 0
	}
}
|
|
|
|
|
|
|
|
|
|
|
|
// emoji processor to match emoji and add emoji class
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func emojiProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
start := 0
|
|
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next && start < len(node.Data) {
|
|
|
|
|
|
m := emoji.FindEmojiSubmatchIndex(node.Data[start:])
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
m[0] += start
|
|
|
|
|
|
m[1] += start
|
|
|
|
|
|
|
|
|
|
|
|
codepoint := node.Data[m[0]:m[1]]
|
|
|
|
|
|
start = m[1]
|
|
|
|
|
|
val := emoji.FromCode(codepoint)
|
|
|
|
|
|
if val != nil {
|
2024-10-23 19:07:53 -04:00
|
|
|
|
replaceContent(node, m[0], m[1], createEmoji(codepoint, "emoji", val.Description, val.Aliases[0]))
|
2021-06-17 06:35:05 -04:00
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
start = 0
|
|
|
|
|
|
}
|
2020-04-28 14:05:39 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2025-02-05 04:04:19 -05:00
|
|
|
|
// hashCurrentPatternProcessor renders SHA1/SHA256 strings to corresponding links that
// are assumed to be in the same repository.
func hashCurrentPatternProcessor(ctx *RenderContext, node *html.Node) {
	// All of user/repo/repoPath are required to resolve and verify hashes.
	if ctx.Metas == nil || ctx.Metas["user"] == "" || ctx.Metas["repo"] == "" || ctx.Metas["repoPath"] == "" {
		return
	}

	// start is the scan offset within the current text node.
	start := 0
	next := node.NextSibling
	// Lazily initialize the per-render cache of hash existence checks.
	if ctx.ShaExistCache == nil {
		ctx.ShaExistCache = make(map[string]bool)
	}
	for node != nil && node != next && start < len(node.Data) {
		m := hashCurrentPattern.FindStringSubmatchIndex(node.Data[start:])
		if m == nil {
			return
		}
		// Translate the hash capture group offsets into node.Data coordinates.
		m[2] += start
		m[3] += start

		hash := node.Data[m[2]:m[3]]
		// The regex does not lie, it matches the hash pattern.
		// However, a regex cannot know if a hash actually exists or not.
		// We could assume that a SHA1 hash should probably contain alphas AND numerics
		// but that is not always the case.
		// Although unlikely, deadbeef and 1234567 are valid short forms of SHA1 hash
		// as used by git and github for linking and thus we have to do similar.
		// Because of this, we check to make sure that a matched hash is actually
		// a commit in the repository before making it a link.

		// check cache first
		exist, inCache := ctx.ShaExistCache[hash]
		if !inCache {
			// Open the repository on first use and register cleanup on cancel.
			if ctx.GitRepo == nil {
				var err error
				ctx.GitRepo, err = git.OpenRepository(ctx.Ctx, ctx.Metas["repoPath"])
				if err != nil {
					log.Error("unable to open repository: %s Error: %v", ctx.Metas["repoPath"], err)
					return
				}
				ctx.AddCancel(func() {
					ctx.GitRepo.Close()
					ctx.GitRepo = nil
				})
			}

			exist = ctx.GitRepo.IsReferenceExist(hash)
			ctx.ShaExistCache[hash] = exist
		}

		// Not a real object in this repo: skip past the match and keep scanning.
		if !exist {
			start = m[3]
			continue
		}

		link := util.URLJoin(ctx.Links.Prefix(), ctx.Metas["user"], ctx.Metas["repo"], "commit", hash)
		replaceContent(node, m[2], m[3], createCodeLink(link, base.ShortSha(hash), "commit"))
		// Rescan from the start of the trailing text node after the link.
		start = 0
		node = node.NextSibling.NextSibling
	}
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2025-06-16 08:55:17 -04:00
|
|
|
|
// fediAddressProcessor replaces raw fediverse handles with toolforge links
|
|
|
|
|
|
func fediAddressProcessor(ctx *RenderContext, node *html.Node) {
|
|
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
m := fediRegex.FindStringSubmatchIndex(node.Data)
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
fedihandle := node.Data[m[2]:m[3]]
|
|
|
|
|
|
replaceContent(node, m[2], m[3], createLink("https://fedirect.toolforge.org/?id="+url.QueryEscape(fedihandle), fedihandle, "fedihandle"))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
2018-02-27 02:09:18 -05:00
|
|
|
|
// emailAddressProcessor replaces raw email addresses with a mailto: link.
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func emailAddressProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
m := emailRegex.FindStringSubmatchIndex(node.Data)
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
mail := node.Data[m[2]:m[3]]
|
|
|
|
|
|
replaceContent(node, m[2], m[3], createLink("mailto:"+mail, mail, "mailto"))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2018-02-27 02:09:18 -05:00
|
|
|
|
// linkProcessor creates links for any HTTP or HTTPS URL not captured by
|
|
|
|
|
|
// markdown.
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func linkProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
m := common.LinkRegex.FindStringIndex(node.Data)
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
uri := node.Data[m[0]:m[1]]
|
|
|
|
|
|
replaceContent(node, m[0], m[1], createLink(uri, uri, "link"))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func genDefaultLinkProcessor(defaultLink string) processor {
|
2021-04-19 18:25:08 -04:00
|
|
|
|
return func(ctx *RenderContext, node *html.Node) {
|
2018-02-27 02:09:18 -05:00
|
|
|
|
ch := &html.Node{
|
|
|
|
|
|
Parent: node,
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: node.Data,
|
|
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
|
2018-02-27 02:09:18 -05:00
|
|
|
|
node.Type = html.ElementNode
|
|
|
|
|
|
node.Data = "a"
|
|
|
|
|
|
node.DataAtom = atom.A
|
2019-09-10 05:03:30 -04:00
|
|
|
|
node.Attr = []html.Attribute{
|
|
|
|
|
|
{Key: "href", Val: defaultLink},
|
2022-07-22 06:49:24 -04:00
|
|
|
|
{Key: "class", Val: "default-link muted"},
|
2019-09-10 05:03:30 -04:00
|
|
|
|
}
|
2018-02-27 02:09:18 -05:00
|
|
|
|
node.FirstChild, node.LastChild = ch, ch
|
|
|
|
|
|
}
|
2017-09-16 13:17:57 -04:00
|
|
|
|
}
|
2019-03-11 22:23:34 -04:00
|
|
|
|
|
|
|
|
|
|
// descriptionLinkProcessor creates links for DescriptionHTML
|
2021-04-19 18:25:08 -04:00
|
|
|
|
func descriptionLinkProcessor(ctx *RenderContext, node *html.Node) {
|
2021-06-17 06:35:05 -04:00
|
|
|
|
next := node.NextSibling
|
|
|
|
|
|
for node != nil && node != next {
|
|
|
|
|
|
m := common.LinkRegex.FindStringIndex(node.Data)
|
|
|
|
|
|
if m == nil {
|
|
|
|
|
|
return
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
uri := node.Data[m[0]:m[1]]
|
|
|
|
|
|
replaceContent(node, m[0], m[1], createDescriptionLink(uri, uri))
|
|
|
|
|
|
node = node.NextSibling.NextSibling
|
2019-03-11 22:23:34 -04:00
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
|
|
|
|
|
|
func createDescriptionLink(href, content string) *html.Node {
|
|
|
|
|
|
textNode := &html.Node{
|
|
|
|
|
|
Type: html.TextNode,
|
|
|
|
|
|
Data: content,
|
|
|
|
|
|
}
|
|
|
|
|
|
linkNode := &html.Node{
|
|
|
|
|
|
FirstChild: textNode,
|
|
|
|
|
|
LastChild: textNode,
|
|
|
|
|
|
Type: html.ElementNode,
|
|
|
|
|
|
Data: "a",
|
|
|
|
|
|
DataAtom: atom.A,
|
|
|
|
|
|
Attr: []html.Attribute{
|
|
|
|
|
|
{Key: "href", Val: href},
|
|
|
|
|
|
{Key: "target", Val: "_blank"},
|
|
|
|
|
|
{Key: "rel", Val: "noopener noreferrer"},
|
|
|
|
|
|
},
|
|
|
|
|
|
}
|
|
|
|
|
|
textNode.Parent = linkNode
|
|
|
|
|
|
return linkNode
|
|
|
|
|
|
}
|
2025-12-28 11:18:51 -05:00
|
|
|
|
|
|
|
|
|
|
// Adds an optional repo slug and optionally the instance domain and URL
|
|
|
|
|
|
//
|
|
|
|
|
|
// The repo slug is added if the link points to a different repo
|
|
|
|
|
|
// The instance domain and sub-path is added if the link points to a different instance
|
|
|
|
|
|
func optionalRepoSlugAndInstancePath(ctx *RenderContext, text *string, fullURL, slug string) {
|
|
|
|
|
|
if len(ctx.Links.Base) > 0 {
|
|
|
|
|
|
// The fullURL is the url to e.g. the commit. The slug is e.g. `forgejo/forgejo`.
|
|
|
|
|
|
// To retrieve the instance domain and sub-path we need to remove the repo slug
|
|
|
|
|
|
slugStart := strings.LastIndex(fullURL, slug)
|
|
|
|
|
|
targetInstance := fullURL[:slugStart]
|
|
|
|
|
|
|
|
|
|
|
|
// Check if the URL points to a different instance
|
|
|
|
|
|
if setting.AppURL != targetInstance {
|
|
|
|
|
|
// Remove the http scheme for displaying
|
|
|
|
|
|
targetInstance = httpSchemePattern.ReplaceAllString(targetInstance, "")
|
|
|
|
|
|
|
|
|
|
|
|
*text = targetInstance + slug + "@" + *text
|
|
|
|
|
|
} else if !strings.HasSuffix(strings.TrimSuffix(ctx.Links.Base, "/"), slug) {
|
|
|
|
|
|
// If it is a link to a different repo, but on the same instance only add the repo slug
|
|
|
|
|
|
*text = slug + "@" + *text
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
|
|
|
|
|
}
|
2026-01-25 17:10:56 -05:00
|
|
|
|
|
|
|
|
|
|
// escapeInlineCodeBlocks escapes HTML symbols in contents of Markdown inline code blocks
|
|
|
|
|
|
// to prevent clashing with HTML parsing
|
|
|
|
|
|
func escapeInlineCodeBlocks(input string) string {
|
|
|
|
|
|
return InlineCodeBlockRegex.ReplaceAllStringFunc(input, html.EscapeString)
|
|
|
|
|
|
}
|