Merge pull request #1020 from jcracknell/tag-exclude-nethtml

Exclude HTML tags from Markdown post-processing
无闻 10 years ago
commit 750d82b8e2
1 changed file: modules/base/markdown.go (52 lines changed)
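
This change stops RenderSpecialLink from running over the raw Markdown source. Instead, the rendered HTML is re-tokenized and special-link processing is applied only to text outside <a>, <code>, and <pre> tags, so references inside links and code are left untouched. A minimal caller sketch (hypothetical, not part of this commit; it assumes only the exported RenderMarkdown signature shown in the diff and an example URL prefix):

package main

import (
	"fmt"

	"github.com/gogits/gogs/modules/base"
)

func main() {
	// The "#1" in plain text may be rewritten into an issue link by RenderSpecialLink;
	// the same reference inside the inline code span passes through unchanged.
	out := base.RenderMarkdown([]byte("see #1 and `#1`"), "https://try.gogs.io/user/repo")
	fmt.Println(string(out))
}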

@@ -7,6 +7,7 @@ package base
 import (
 	"bytes"
 	"fmt"
+	"io"
 	"net/http"
 	"path"
 	"path/filepath"
@@ -16,6 +17,8 @@ import (
 	"github.com/russross/blackfriday"
 
 	"github.com/gogits/gogs/modules/setting"
+
+	"golang.org/x/net/html"
 )
 
 func isletter(c byte) bool {
@@ -217,12 +220,53 @@ func RenderRawMarkdown(body []byte, urlPrefix string) []byte {
 }
 
 func RenderMarkdown(rawBytes []byte, urlPrefix string) []byte {
-	body := RenderSpecialLink(rawBytes, urlPrefix)
-	body = RenderRawMarkdown(body, urlPrefix)
-	body = Sanitizer.SanitizeBytes(body)
-	return body
+	result := RenderRawMarkdown(rawBytes, urlPrefix)
+	result = PostProcessMarkdown(result, urlPrefix)
+	result = Sanitizer.SanitizeBytes(result)
+	return result
 }
 
 func RenderMarkdownString(raw, urlPrefix string) string {
 	return string(RenderMarkdown([]byte(raw), urlPrefix))
 }
+
+func PostProcessMarkdown(rawHtml []byte, urlPrefix string) []byte {
+	var buf bytes.Buffer
+	tokenizer := html.NewTokenizer(bytes.NewReader(rawHtml))
+	for html.ErrorToken != tokenizer.Next() {
+		token := tokenizer.Token()
+		switch token.Type {
+		case html.TextToken:
+			text := []byte(token.String())
+			text = RenderSpecialLink(text, urlPrefix)
+			buf.Write(text)
+		case html.StartTagToken:
+			buf.WriteString(token.String())
+			tagName := token.Data
+			// If this is an excluded tag, we skip processing all output until a close tag is encountered
+			if strings.EqualFold("a", tagName) || strings.EqualFold("code", tagName) || strings.EqualFold("pre", tagName) {
+				for html.ErrorToken != tokenizer.Next() {
+					token = tokenizer.Token()
+					// Copy the token to the output verbatim
+					buf.WriteString(token.String())
+					// If this is the close tag, we are done
+					if html.EndTagToken == token.Type && strings.EqualFold(tagName, token.Data) {
+						break
+					}
+				}
+			}
+		default:
+			buf.WriteString(token.String())
+		}
+	}
+	if io.EOF == tokenizer.Err() {
+		return buf.Bytes()
+	}
+	// If we are not at the end of the input, then some other parsing error has occurred, so return
+	// the input verbatim.
+	return rawHtml
+}
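
The skip logic is easiest to see in isolation: walk the rendered HTML with the golang.org/x/net/html tokenizer, transform only text tokens, and copy everything from an excluded start tag up to its matching end tag verbatim. The following self-contained sketch demonstrates that technique with hypothetical names, substituting strings.ToUpper for RenderSpecialLink so the effect is visible:

package main

import (
	"bytes"
	"fmt"
	"io"
	"strings"

	"golang.org/x/net/html"
)

// postProcess applies transform to text outside <a>, <code> and <pre>;
// excluded tags and their contents are copied through verbatim.
func postProcess(rawHTML []byte, transform func(string) string) []byte {
	var buf bytes.Buffer
	tokenizer := html.NewTokenizer(bytes.NewReader(rawHTML))
	for tokenizer.Next() != html.ErrorToken {
		token := tokenizer.Token()
		switch token.Type {
		case html.TextToken:
			buf.WriteString(transform(token.String()))
		case html.StartTagToken:
			buf.WriteString(token.String())
			tag := token.Data
			if strings.EqualFold(tag, "a") || strings.EqualFold(tag, "code") || strings.EqualFold(tag, "pre") {
				// Copy tokens verbatim until the matching end tag.
				for tokenizer.Next() != html.ErrorToken {
					token = tokenizer.Token()
					buf.WriteString(token.String())
					if token.Type == html.EndTagToken && strings.EqualFold(token.Data, tag) {
						break
					}
				}
			}
		default:
			buf.WriteString(token.String())
		}
	}
	if tokenizer.Err() == io.EOF {
		return buf.Bytes()
	}
	// Any error other than EOF: fall back to the unmodified input.
	return rawHTML
}

func main() {
	in := []byte("<p>issue ref</p><code>issue ref</code>")
	fmt.Println(string(postProcess(in, strings.ToUpper)))
	// Output: <p>ISSUE REF</p><code>issue ref</code>
}

Text inside <code> is never handed to the transform, which is why issue references and commit SHAs inside links and code blocks stop being linkified. Note that nested excluded tags of the same name are not tracked: the inner loop stops at the first matching end tag, mirroring the behavior of PostProcessMarkdown above.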
