Quieter Integration Tests (#6513)

* Rename BaseLogger to WriterLogger to make it easier to create other providers

* Don't export ColorBytes and ResetBytes from ColoredValues

* Make integration tests only print logs if they fail

* Check that the output can be colored before coloring

* I always forget about MSSQL

* Oh and use LEVEL in sqlite.ini

* Make the test logger log at Info, since that means you see the router output

* Remove empty expected changes

* Make the migrations quieter too

* Don't display SQL on error - it can be looked at in the file logs if necessary

* Fix skip when using onGiteaRun
Branch: tokarchuk/v1.17
zeripath authored 6 years ago, committed by Lauris BH
parent 7ed65a98e8, commit 5422f23ed8
23 changed files:
1. docs/content/doc/advanced/logging-documentation.en-us.md (6 lines changed)
2. integrations/git_helper_for_declarative_test.go (2 lines changed)
3. integrations/git_test.go (25 lines changed)
4. integrations/integration_test.go (7 lines changed)
5. integrations/migration-test/migration_test.go (26 lines changed)
6. integrations/mssql.ini.tmpl (13 lines changed)
7. integrations/mysql.ini.tmpl (13 lines changed)
8. integrations/mysql8.ini.tmpl (14 lines changed)
9. integrations/pgsql.ini.tmpl (13 lines changed)
10. integrations/sqlite.ini (25 lines changed)
11. integrations/testlogger.go (104 lines changed)
12. modules/log/base.go (328 lines changed)
13. modules/log/colors.go (40 lines changed)
14. modules/log/conn.go (4 lines changed)
15. modules/log/console.go (8 lines changed)
16. modules/log/file.go (4 lines changed)
17. modules/log/file_test.go (2 lines changed)
18. modules/log/flags.go (64 lines changed)
19. modules/log/smtp.go (4 lines changed)
20. modules/log/writer.go (273 lines changed)
21. modules/log/writer_test.go (12 lines changed)
22. modules/setting/log.go (9 lines changed)
23. modules/setting/setting.go (6 lines changed)

@@ -357,10 +357,10 @@ attributes should be cached if this is a commonly used log message.
 of bytes representing the color.
 These functions will not double wrap a `log.ColoredValue`. They will
-also set the ResetBytes to the cached resetBytes.
-Be careful not to change the contents of resetBytes or boldBytes as this
-will break rendering of logging elsewhere. You have been warned.
+also set the `resetBytes` to the cached `resetBytes`.
+The `colorBytes` and `resetBytes` are not exported to prevent
+accidental overwriting of internal values.
 
 ## Log Spoofing protection

@@ -50,7 +50,7 @@ func createSSHUrl(gitPath string, u *url.URL) *url.URL {
 }
 
 func onGiteaRun(t *testing.T, callback func(*testing.T, *url.URL)) {
-    prepareTestEnv(t)
+    prepareTestEnv(t, 1)
    s := http.Server{
        Handler: mac,
    }

@@ -38,6 +38,7 @@ func testGit(t *testing.T, u *url.URL) {
    u.Path = baseAPITestContext.GitPath()
 
    t.Run("HTTP", func(t *testing.T) {
+       PrintCurrentTest(t)
        httpContext := baseAPITestContext
        httpContext.Reponame = "repo-tmp-17"
@@ -47,6 +48,7 @@ func testGit(t *testing.T, u *url.URL) {
        assert.NoError(t, err)
        defer os.RemoveAll(dstPath)
        t.Run("Standard", func(t *testing.T) {
+           PrintCurrentTest(t)
            ensureAnonymousClone(t, u)
            t.Run("CreateRepo", doAPICreateRepository(httpContext, false))
@@ -57,16 +59,21 @@ func testGit(t *testing.T, u *url.URL) {
            t.Run("Clone", doGitClone(dstPath, u))
            t.Run("PushCommit", func(t *testing.T) {
+               PrintCurrentTest(t)
                t.Run("Little", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    little = commitAndPush(t, littleSize, dstPath)
                })
                t.Run("Big", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    big = commitAndPush(t, bigSize, dstPath)
                })
            })
        })
        t.Run("LFS", func(t *testing.T) {
+           PrintCurrentTest(t)
            t.Run("PushCommit", func(t *testing.T) {
+               PrintCurrentTest(t)
                //Setup git LFS
                _, err = git.NewCommand("lfs").AddArguments("install").RunInDir(dstPath)
                assert.NoError(t, err)
@@ -76,17 +83,21 @@ func testGit(t *testing.T, u *url.URL) {
                assert.NoError(t, err)
                t.Run("Little", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    littleLFS = commitAndPush(t, littleSize, dstPath)
                })
                t.Run("Big", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    bigLFS = commitAndPush(t, bigSize, dstPath)
                })
            })
            t.Run("Locks", func(t *testing.T) {
+               PrintCurrentTest(t)
                lockTest(t, u.String(), dstPath)
            })
        })
        t.Run("Raw", func(t *testing.T) {
+           PrintCurrentTest(t)
            session := loginUser(t, "user2")
 
            // Request raw paths
@@ -110,6 +121,7 @@ func testGit(t *testing.T, u *url.URL) {
        })
        t.Run("Media", func(t *testing.T) {
+           PrintCurrentTest(t)
            session := loginUser(t, "user2")
 
            // Request media paths
@@ -132,12 +144,14 @@ func testGit(t *testing.T, u *url.URL) {
    })
 
    t.Run("SSH", func(t *testing.T) {
+       PrintCurrentTest(t)
        sshContext := baseAPITestContext
        sshContext.Reponame = "repo-tmp-18"
        keyname := "my-testing-key"
        //Setup key the user ssh key
        withKeyFile(t, keyname, func(keyFile string) {
            t.Run("CreateUserKey", doAPICreateUserKey(sshContext, "test-key", keyFile))
+           PrintCurrentTest(t)
 
            //Setup remote link
            sshURL := createSSHUrl(sshContext.GitPath(), u)
@@ -149,6 +163,7 @@ func testGit(t *testing.T, u *url.URL) {
            var little, big, littleLFS, bigLFS string
            t.Run("Standard", func(t *testing.T) {
+               PrintCurrentTest(t)
                t.Run("CreateRepo", doAPICreateRepository(sshContext, false))
 
                //TODO get url from api
@@ -156,16 +171,21 @@ func testGit(t *testing.T, u *url.URL) {
                //time.Sleep(5 * time.Minute)
                t.Run("PushCommit", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    t.Run("Little", func(t *testing.T) {
+                       PrintCurrentTest(t)
                        little = commitAndPush(t, littleSize, dstPath)
                    })
                    t.Run("Big", func(t *testing.T) {
+                       PrintCurrentTest(t)
                        big = commitAndPush(t, bigSize, dstPath)
                    })
                })
            })
            t.Run("LFS", func(t *testing.T) {
+               PrintCurrentTest(t)
                t.Run("PushCommit", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    //Setup git LFS
                    _, err = git.NewCommand("lfs").AddArguments("install").RunInDir(dstPath)
                    assert.NoError(t, err)
@@ -175,17 +195,21 @@ func testGit(t *testing.T, u *url.URL) {
                    assert.NoError(t, err)
                    t.Run("Little", func(t *testing.T) {
+                       PrintCurrentTest(t)
                        littleLFS = commitAndPush(t, littleSize, dstPath)
                    })
                    t.Run("Big", func(t *testing.T) {
+                       PrintCurrentTest(t)
                        bigLFS = commitAndPush(t, bigSize, dstPath)
                    })
                })
                t.Run("Locks", func(t *testing.T) {
+                   PrintCurrentTest(t)
                    lockTest(t, u.String(), dstPath)
                })
            })
            t.Run("Raw", func(t *testing.T) {
+               PrintCurrentTest(t)
                session := loginUser(t, "user2")
 
                // Request raw paths
@@ -209,6 +233,7 @@ func testGit(t *testing.T, u *url.URL) {
            })
            t.Run("Media", func(t *testing.T) {
+               PrintCurrentTest(t)
                session := loginUser(t, "user2")
 
                // Request media paths

@@ -165,7 +165,12 @@ func initIntegrationTest() {
    routers.GlobalInit()
 }
 
-func prepareTestEnv(t testing.TB) {
+func prepareTestEnv(t testing.TB, skip ...int) {
+   ourSkip := 2
+   if len(skip) > 0 {
+       ourSkip += skip[0]
+   }
+   PrintCurrentTest(t, ourSkip)
    assert.NoError(t, models.LoadFixtures())
    assert.NoError(t, os.RemoveAll(setting.RepoRootPath))
    assert.NoError(t, os.RemoveAll(models.LocalCopyPath()))
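The variadic skip parameter exists because prepareTestEnv is sometimes reached through a wrapper such as onGiteaRun (which passes 1), and PrintCurrentTest ultimately resolves the caller with runtime.Caller. A minimal, self-contained sketch of that frame bookkeeping, using hypothetical helper names rather than the real integration helpers:

package integrations_example

import (
    "fmt"
    "runtime"
    "testing"
)

// reportCaller mimics PrintCurrentTest: runtime.Caller(skip) walks `skip`
// frames up the stack so the printed file:line belongs to the test itself,
// not to this helper.
func reportCaller(t testing.TB, skip int) {
    _, filename, line, _ := runtime.Caller(skip)
    fmt.Printf("=== %s (%s:%d)\n", t.Name(), filename, line)
}

// prepareEnv is normally called directly by a test, so two frames
// (reportCaller and prepareEnv) sit between runtime.Caller and the test.
// Wrappers add one frame each and report that via the variadic argument.
func prepareEnv(t testing.TB, skip ...int) {
    ourSkip := 2
    if len(skip) > 0 {
        ourSkip += skip[0]
    }
    reportCaller(t, ourSkip)
}

func TestWhereAmI(t *testing.T) {
    prepareEnv(t) // prints this file and line, not reportCaller's
}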

@@ -9,13 +9,13 @@ import (
    "database/sql"
    "fmt"
    "io/ioutil"
-   "log"
    "os"
    "path"
    "regexp"
    "sort"
    "testing"
 
+   "code.gitea.io/gitea/integrations"
    "code.gitea.io/gitea/models"
    "code.gitea.io/gitea/models/migrations"
    "code.gitea.io/gitea/modules/setting"
@@ -26,21 +26,22 @@ import (
 var currentEngine *xorm.Engine
 
-func initMigrationTest() {
+func initMigrationTest(t *testing.T) {
+   integrations.PrintCurrentTest(t, 2)
    giteaRoot := os.Getenv("GITEA_ROOT")
    if giteaRoot == "" {
-       fmt.Println("Environment variable $GITEA_ROOT not set")
+       integrations.Printf("Environment variable $GITEA_ROOT not set\n")
        os.Exit(1)
    }
    setting.AppPath = path.Join(giteaRoot, "gitea")
    if _, err := os.Stat(setting.AppPath); err != nil {
-       fmt.Printf("Could not find gitea binary at %s\n", setting.AppPath)
+       integrations.Printf("Could not find gitea binary at %s\n", setting.AppPath)
        os.Exit(1)
    }
 
    giteaConf := os.Getenv("GITEA_CONF")
    if giteaConf == "" {
-       fmt.Println("Environment variable $GITEA_CONF not set")
+       integrations.Printf("Environment variable $GITEA_CONF not set\n")
        os.Exit(1)
    } else if !path.IsAbs(giteaConf) {
        setting.CustomConf = path.Join(giteaRoot, giteaConf)
@@ -51,6 +52,7 @@ func initMigrationTest() {
    setting.NewContext()
    setting.CheckLFSVersion()
    models.LoadConfigs()
+   setting.NewLogServices(true)
 }
 
 func getDialect() string {
@@ -125,7 +127,7 @@ func restoreOldDB(t *testing.T, version string) bool {
    data, err := readSQLFromFile(version)
    assert.NoError(t, err)
    if len(data) == 0 {
-       log.Printf("No db found to restore for %s version: %s\n", models.DbCfg.Type, version)
+       integrations.Printf("No db found to restore for %s version: %s\n", models.DbCfg.Type, version)
        return false
    }
@@ -212,7 +214,8 @@ func wrappedMigrate(x *xorm.Engine) error {
 }
 
 func doMigrationTest(t *testing.T, version string) {
-   log.Printf("Performing migration test for %s version: %s", models.DbCfg.Type, version)
+   integrations.PrintCurrentTest(t)
+   integrations.Printf("Performing migration test for %s version: %s\n", models.DbCfg.Type, version)
    if !restoreOldDB(t, version) {
        return
    }
@@ -227,19 +230,22 @@ func doMigrationTest(t *testing.T, version string) {
 }
 
 func TestMigrations(t *testing.T) {
-   initMigrationTest()
+   initMigrationTest(t)
    dialect := models.DbCfg.Type
    versions, err := availableVersions()
    assert.NoError(t, err)
 
    if len(versions) == 0 {
-       log.Printf("No old database versions available to migration test for %s\n", dialect)
+       integrations.Printf("No old database versions available to migration test for %s\n", dialect)
        return
    }
 
-   log.Printf("Preparing to test %d migrations for %s\n", len(versions), dialect)
+   integrations.Printf("Preparing to test %d migrations for %s\n", len(versions), dialect)
    for _, version := range versions {
+       t.Run(fmt.Sprintf("Migrate-%s-%s", dialect, version), func(t *testing.T) {
            doMigrationTest(t, version)
+       })
    }
 }
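Because each dump now runs as its own subtest named Migrate-<dialect>-<version>, a single version can be exercised in isolation with Go's standard subtest filter, for example `go test -run 'TestMigrations/Migrate-<dialect>-<version>'` (with the usual GITEA_ROOT and GITEA_CONF environment variables set); the exact dialect and version strings come from the available dump files.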

@@ -60,11 +60,16 @@ PROVIDER = file
 PROVIDER_CONFIG = data/sessions-mssql
 
 [log]
-MODE = console,file
-ROOT_PATH = mssql-log
+MODE = test,file
+ROOT_PATH = sqlite-log
+REDIRECT_MACARON_LOG = true
+ROUTER = ,
+MACARON = ,
+XORM = file
 
-[log.console]
-LEVEL = Warn
+[log.test]
+LEVEL = Info
+COLORIZE = true
 
 [log.file]
 LEVEL = Debug

@@ -60,11 +60,16 @@ PROVIDER = file
 PROVIDER_CONFIG = data/sessions-mysql
 
 [log]
-MODE = console,file
-ROOT_PATH = mysql-log
+MODE = test,file
+ROOT_PATH = sqlite-log
+REDIRECT_MACARON_LOG = true
+ROUTER = ,
+MACARON = ,
+XORM = file
 
-[log.console]
-LEVEL = Warn
+[log.test]
+LEVEL = Info
+COLORIZE = true
 
 [log.file]
 LEVEL = Debug

@@ -57,15 +57,21 @@ PROVIDER = file
 PROVIDER_CONFIG = data/sessions-mysql8
 
 [log]
-MODE = console,file
-ROOT_PATH = mysql8-log
+MODE = test,file
+ROOT_PATH = sqlite-log
+REDIRECT_MACARON_LOG = true
+ROUTER = ,
+MACARON = ,
+XORM = file
 
-[log.console]
-LEVEL = Warn
+[log.test]
+LEVEL = Info
+COLORIZE = true
 
 [log.file]
 LEVEL = Debug
 
 [security]
 INSTALL_LOCK = true
 SECRET_KEY = 9pCviYTWSb

@@ -60,11 +60,16 @@ PROVIDER = file
 PROVIDER_CONFIG = data/sessions-pgsql
 
 [log]
-MODE = console,file
-ROOT_PATH = pgsql-log
+MODE = test,file
+ROOT_PATH = sqlite-log
+REDIRECT_MACARON_LOG = true
+ROUTER = ,
+MACARON = ,
+XORM = file
 
-[log.console]
-LEVEL = Warn
+[log.test]
+LEVEL = Info
+COLORIZE = true
 
 [log.file]
 LEVEL = Debug

@@ -33,18 +33,19 @@ APP_DATA_PATH = integrations/gitea-integration-sqlite/data
 ENABLE_GZIP = true
 
 [mailer]
-ENABLED = false
+ENABLED = true
+MAILER_TYPE = dummy
+FROM = sqlite-integration-test@gitea.io
 
 [service]
 REGISTER_EMAIL_CONFIRM = false
-ENABLE_NOTIFY_MAIL = false
+ENABLE_NOTIFY_MAIL = true
 DISABLE_REGISTRATION = false
 ENABLE_CAPTCHA = false
 REQUIRE_SIGNIN_VIEW = false
 DEFAULT_KEEP_EMAIL_PRIVATE = false
 DEFAULT_ALLOW_CREATE_ORGANIZATION = true
 NO_REPLY_ADDRESS = noreply.example.org
-ENABLE_NOTIFY_MAIL = true
 
 [picture]
 DISABLE_GRAVATAR = false
@@ -54,11 +55,16 @@ ENABLE_FEDERATED_AVATAR = false
 PROVIDER = file
 
 [log]
-MODE = console,file
+MODE = test,file
 ROOT_PATH = sqlite-log
+REDIRECT_MACARON_LOG = true
+ROUTER = ,
+MACARON = ,
+XORM = file
 
-[log.console]
-LEVEL = Warn
+[log.test]
+LEVEL = Info
+COLORIZE = true
 
 [log.file]
 LEVEL = Debug
@@ -68,7 +74,6 @@ INSTALL_LOCK = true
 SECRET_KEY = 9pCviYTWSb
 INTERNAL_TOKEN = eyJhbGciOiJIUzI1NiIsInR5cCI6IkpXVCJ9.eyJuYmYiOjE0OTI3OTU5ODN9.OQkH5UmzID2XBdwQ9TAI6Jj2t1X-wElVTjbE7aoN4I8
 
-[mailer]
-ENABLED = true
-MAILER_TYPE = dummy
-FROM = sqlite-integration-test@gitea.io
+[oauth2]
+JWT_SECRET = KZb_QLUd4fYVyxetjxC4eZkrBgWM2SndOOWDNtgUUko

@@ -0,0 +1,104 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package integrations
import (
"encoding/json"
"fmt"
"os"
"runtime"
"strings"
"testing"
"code.gitea.io/gitea/modules/log"
)
var prefix string
// TestLogger is a logger which will write to the testing log
type TestLogger struct {
log.WriterLogger
}
var writerCloser = &testLoggerWriterCloser{}
type testLoggerWriterCloser struct {
t testing.TB
}
func (w *testLoggerWriterCloser) Write(p []byte) (int, error) {
if w.t != nil {
if len(p) > 0 && p[len(p)-1] == '\n' {
p = p[:len(p)-1]
}
w.t.Log(string(p))
return len(p), nil
}
return len(p), nil
}
func (w *testLoggerWriterCloser) Close() error {
return nil
}
// PrintCurrentTest prints the current test to os.Stdout
func PrintCurrentTest(t testing.TB, skip ...int) {
actualSkip := 1
if len(skip) > 0 {
actualSkip = skip[0]
}
_, filename, line, _ := runtime.Caller(actualSkip)
if log.CanColorStdout {
fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", log.NewColoredValue(t.Name()), strings.TrimPrefix(filename, prefix), line)
} else {
fmt.Fprintf(os.Stdout, "=== %s (%s:%d)\n", t.Name(), strings.TrimPrefix(filename, prefix), line)
}
writerCloser.t = t
}
// Printf takes a format and args and prints the string to os.Stdout
func Printf(format string, args ...interface{}) {
if log.CanColorStdout {
for i := 0; i < len(args); i++ {
args[i] = log.NewColoredValue(args[i])
}
}
fmt.Fprintf(os.Stdout, "\t"+format, args...)
}
// NewTestLogger creates a TestLogger as a log.LoggerProvider
func NewTestLogger() log.LoggerProvider {
logger := &TestLogger{}
logger.Colorize = log.CanColorStdout
logger.Level = log.TRACE
return logger
}
// Init inits connection writer with json config.
// json config only need key "level".
func (log *TestLogger) Init(config string) error {
err := json.Unmarshal([]byte(config), log)
if err != nil {
return err
}
log.NewWriterLogger(writerCloser)
return nil
}
// Flush when log should be flushed
func (log *TestLogger) Flush() {
}
// GetName returns the default name for this implementation
func (log *TestLogger) GetName() string {
return "test"
}
func init() {
log.Register("test", NewTestLogger)
_, filename, _, _ := runtime.Caller(0)
prefix = strings.TrimSuffix(filename, "integrations/testlogger.go")
}
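The whole point of the new "test" mode is that everything the logger produces ends up in testing.T's per-test log buffer: `go test` only prints that buffer when a test fails (or when -v is passed), which is what makes passing integration runs quiet. A small self-contained sketch of that mechanism (hypothetical names and log line, not part of the diff):

package quiet_example

import "testing"

// logThroughT forwards log lines to t.Log the way testLoggerWriterCloser
// does above; the test framework buffers them per test.
func logThroughT(t testing.TB, lines ...string) {
    for _, line := range lines {
        t.Log(line) // hidden for passing tests, printed with a failure report
    }
}

func TestQuietWhenPassing(t *testing.T) {
    logThroughT(t, "router: completed GET /user2/repo1 200 OK")
    // No assertion fails here, so the line above never reaches the console.
}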

@@ -1,328 +0,0 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package log
import (
"bytes"
"fmt"
"io"
"regexp"
"strings"
"sync"
)
// These flags define which text to prefix to each log entry generated
// by the Logger. Bits are or'ed together to control what's printed.
// There is no control over the order they appear (the order listed
// here) or the format they present (as described in the comments).
// The prefix is followed by a colon only if more than time is stated
// is specified. For example, flags Ldate | Ltime
// produce, 2009/01/23 01:23:23 message.
// The standard is:
// 2009/01/23 01:23:23 ...a/b/c/d.go:23:runtime.Caller() [I]: message
const (
Ldate = 1 << iota // the date in the local time zone: 2009/01/23
Ltime // the time in the local time zone: 01:23:23
Lmicroseconds // microsecond resolution: 01:23:23.123123. assumes Ltime.
Llongfile // full file name and line number: /a/b/c/d.go:23
Lshortfile // final file name element and line number: d.go:23. overrides Llongfile
Lfuncname // function name of the caller: runtime.Caller()
Lshortfuncname // last part of the function name
LUTC // if Ldate or Ltime is set, use UTC rather than the local time zone
Llevelinitial // Initial character of the provided level in brackets eg. [I] for info
Llevel // Provided level in brackets [INFO]
// Last 20 characters of the filename
Lmedfile = Lshortfile | Llongfile
// LstdFlags is the initial value for the standard logger
LstdFlags = Ldate | Ltime | Lmedfile | Lshortfuncname | Llevelinitial
)
var flagFromString = map[string]int{
"none": 0,
"date": Ldate,
"time": Ltime,
"microseconds": Lmicroseconds,
"longfile": Llongfile,
"shortfile": Lshortfile,
"funcname": Lfuncname,
"shortfuncname": Lshortfuncname,
"utc": LUTC,
"levelinitial": Llevelinitial,
"level": Llevel,
"medfile": Lmedfile,
"stdflags": LstdFlags,
}
// FlagsFromString takes a comma separated list of flags and returns
// the flags for this string
func FlagsFromString(from string) int {
flags := 0
for _, flag := range strings.Split(strings.ToLower(from), ",") {
f, ok := flagFromString[strings.TrimSpace(flag)]
if ok {
flags = flags | f
}
}
return flags
}
type byteArrayWriter []byte
func (b *byteArrayWriter) Write(p []byte) (int, error) {
*b = append(*b, p...)
return len(p), nil
}
// BaseLogger represent a basic logger for Gitea
type BaseLogger struct {
out io.WriteCloser
mu sync.Mutex
Level Level `json:"level"`
StacktraceLevel Level `json:"stacktraceLevel"`
Flags int `json:"flags"`
Prefix string `json:"prefix"`
Colorize bool `json:"colorize"`
Expression string `json:"expression"`
regexp *regexp.Regexp
}
func (b *BaseLogger) createLogger(out io.WriteCloser, level ...Level) {
b.mu.Lock()
defer b.mu.Unlock()
b.out = out
switch b.Flags {
case 0:
b.Flags = LstdFlags
case -1:
b.Flags = 0
}
if len(level) > 0 {
b.Level = level[0]
}
b.createExpression()
}
func (b *BaseLogger) createExpression() {
if len(b.Expression) > 0 {
var err error
b.regexp, err = regexp.Compile(b.Expression)
if err != nil {
b.regexp = nil
}
}
}
// GetLevel returns the logging level for this logger
func (b *BaseLogger) GetLevel() Level {
return b.Level
}
// GetStacktraceLevel returns the stacktrace logging level for this logger
func (b *BaseLogger) GetStacktraceLevel() Level {
return b.StacktraceLevel
}
// Copy of cheap integer to fixed-width decimal to ascii from logger.
func itoa(buf *[]byte, i int, wid int) {
var b [20]byte
bp := len(b) - 1
for i >= 10 || wid > 1 {
wid--
q := i / 10
b[bp] = byte('0' + i - q*10)
bp--
i = q
}
// i < 10
b[bp] = byte('0' + i)
*buf = append(*buf, b[bp:]...)
}
func (b *BaseLogger) createMsg(buf *[]byte, event *Event) {
*buf = append(*buf, b.Prefix...)
t := event.time
if b.Flags&(Ldate|Ltime|Lmicroseconds) != 0 {
if b.Colorize {
*buf = append(*buf, fgCyanBytes...)
}
if b.Flags&LUTC != 0 {
t = t.UTC()
}
if b.Flags&Ldate != 0 {
year, month, day := t.Date()
itoa(buf, year, 4)
*buf = append(*buf, '/')
itoa(buf, int(month), 2)
*buf = append(*buf, '/')
itoa(buf, day, 2)
*buf = append(*buf, ' ')
}
if b.Flags&(Ltime|Lmicroseconds) != 0 {
hour, min, sec := t.Clock()
itoa(buf, hour, 2)
*buf = append(*buf, ':')
itoa(buf, min, 2)
*buf = append(*buf, ':')
itoa(buf, sec, 2)
if b.Flags&Lmicroseconds != 0 {
*buf = append(*buf, '.')
itoa(buf, t.Nanosecond()/1e3, 6)
}
*buf = append(*buf, ' ')
}
if b.Colorize {
*buf = append(*buf, resetBytes...)
}
}
if b.Flags&(Lshortfile|Llongfile) != 0 {
if b.Colorize {
*buf = append(*buf, fgGreenBytes...)
}
file := event.filename
if b.Flags&Lmedfile == Lmedfile {
startIndex := len(file) - 20
if startIndex > 0 {
file = "..." + file[startIndex:]
}
} else if b.Flags&Lshortfile != 0 {
startIndex := strings.LastIndexByte(file, '/')
if startIndex > 0 && startIndex < len(file) {
file = file[startIndex+1:]
}
}
*buf = append(*buf, file...)
*buf = append(*buf, ':')
itoa(buf, event.line, -1)
if b.Flags&(Lfuncname|Lshortfuncname) != 0 {
*buf = append(*buf, ':')
} else {
if b.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
}
if b.Flags&(Lfuncname|Lshortfuncname) != 0 {
if b.Colorize {
*buf = append(*buf, fgGreenBytes...)
}
funcname := event.caller
if b.Flags&Lshortfuncname != 0 {
lastIndex := strings.LastIndexByte(funcname, '.')
if lastIndex > 0 && len(funcname) > lastIndex+1 {
funcname = funcname[lastIndex+1:]
}
}
*buf = append(*buf, funcname...)
if b.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
if b.Flags&(Llevel|Llevelinitial) != 0 {
level := strings.ToUpper(event.level.String())
if b.Colorize {
*buf = append(*buf, levelToColor[event.level]...)
}
*buf = append(*buf, '[')
if b.Flags&Llevelinitial != 0 {
*buf = append(*buf, level[0])
} else {
*buf = append(*buf, level...)
}
*buf = append(*buf, ']')
if b.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
var msg = []byte(event.msg)
if len(msg) > 0 && msg[len(msg)-1] == '\n' {
msg = msg[:len(msg)-1]
}
pawMode := allowColor
if !b.Colorize {
pawMode = removeColor
}
baw := byteArrayWriter(*buf)
(&protectedANSIWriter{
w: &baw,
mode: pawMode,
}).Write([]byte(msg))
*buf = baw
if event.stacktrace != "" && b.StacktraceLevel <= event.level {
lines := bytes.Split([]byte(event.stacktrace), []byte("\n"))
if len(lines) > 1 {
for _, line := range lines {
*buf = append(*buf, "\n\t"...)
*buf = append(*buf, line...)
}
}
*buf = append(*buf, '\n')
}
*buf = append(*buf, '\n')
}
// LogEvent logs the event to the internal writer
func (b *BaseLogger) LogEvent(event *Event) error {
if b.Level > event.level {
return nil
}
b.mu.Lock()
defer b.mu.Unlock()
if !b.Match(event) {
return nil
}
var buf []byte
b.createMsg(&buf, event)
_, err := b.out.Write(buf)
return err
}
// Match checks if the given event matches the logger's regexp expression
func (b *BaseLogger) Match(event *Event) bool {
if b.regexp == nil {
return true
}
if b.regexp.Match([]byte(fmt.Sprintf("%s:%d:%s", event.filename, event.line, event.caller))) {
return true
}
// Match on the non-colored msg - therefore strip out colors
var msg []byte
baw := byteArrayWriter(msg)
(&protectedANSIWriter{
w: &baw,
mode: removeColor,
}).Write([]byte(event.msg))
msg = baw
if b.regexp.Match(msg) {
return true
}
return false
}
// Close the base logger
func (b *BaseLogger) Close() {
b.mu.Lock()
defer b.mu.Unlock()
if b.out != nil {
b.out.Close()
}
}
// GetName returns empty for these provider loggers
func (b *BaseLogger) GetName() string {
return ""
}

@@ -268,8 +268,8 @@ normalLoop:
 // ColoredValue will Color the provided value
 type ColoredValue struct {
-   ColorBytes *[]byte
-   ResetBytes *[]byte
+   colorBytes *[]byte
+   resetBytes *[]byte
    Value      *interface{}
 }
@@ -290,14 +290,14 @@ func NewColoredValuePointer(value *interface{}, color ...ColorAttribute) *Colore
    if len(color) > 0 {
        bytes := ColorBytes(color...)
        return &ColoredValue{
-           ColorBytes: &bytes,
-           ResetBytes: &resetBytes,
+           colorBytes: &bytes,
+           resetBytes: &resetBytes,
            Value:      value,
        }
    }
    return &ColoredValue{
-       ColorBytes: &fgBoldBytes,
-       ResetBytes: &resetBytes,
+       colorBytes: &fgBoldBytes,
+       resetBytes: &resetBytes,
        Value:      value,
    }
@@ -310,17 +310,37 @@ func NewColoredValueBytes(value interface{}, colorBytes *[]byte) *ColoredValue {
        return val
    }
    return &ColoredValue{
-       ColorBytes: colorBytes,
-       ResetBytes: &resetBytes,
+       colorBytes: colorBytes,
+       resetBytes: &resetBytes,
        Value:      &value,
    }
 }
 
 // Format will format the provided value and protect against ANSI spoofing within the value
 func (cv *ColoredValue) Format(s fmt.State, c rune) {
-   s.Write([]byte(*cv.ColorBytes))
+   s.Write([]byte(*cv.colorBytes))
    fmt.Fprintf(&protectedANSIWriter{w: s}, fmtString(s, c), *(cv.Value))
-   s.Write([]byte(*cv.ResetBytes))
+   s.Write([]byte(*cv.resetBytes))
 }
 
+// SetColorBytes will allow a user to set the colorBytes of a colored value
+func (cv *ColoredValue) SetColorBytes(colorBytes []byte) {
+   cv.colorBytes = &colorBytes
+}
+
+// SetColorBytesPointer will allow a user to set the colorBytes pointer of a colored value
+func (cv *ColoredValue) SetColorBytesPointer(colorBytes *[]byte) {
+   cv.colorBytes = colorBytes
+}
+
+// SetResetBytes will allow a user to set the resetBytes of a colored value
+func (cv *ColoredValue) SetResetBytes(resetBytes []byte) {
+   cv.resetBytes = &resetBytes
+}
+
+// SetResetBytesPointer will allow a user to set the resetBytes pointer of a colored value
+func (cv *ColoredValue) SetResetBytesPointer(resetBytes *[]byte) {
+   cv.resetBytes = resetBytes
+}
+
 func fmtString(s fmt.State, c rune) string {
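With colorBytes and resetBytes unexported, code outside modules/log adjusts colors through the new setters instead of writing to the fields. A short usage sketch (assuming the package's exported ColorBytes helper and an FgRed attribute):

package main

import (
    "fmt"

    "code.gitea.io/gitea/modules/log"
)

func main() {
    // Wrap a value with the default (bold) color.
    v := log.NewColoredValue("some value")
    fmt.Printf("%v\n", v)

    // Swap the color afterwards via the setter added above; the cached
    // reset bytes are left untouched.
    v.SetColorBytes(log.ColorBytes(log.FgRed))
    fmt.Printf("%v\n", v)
}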

@@ -77,7 +77,7 @@ func (i *connWriter) connect() error {
 // ConnLogger implements LoggerProvider.
 // it writes messages in keep-live tcp connection.
 type ConnLogger struct {
-   BaseLogger
+   WriterLogger
    ReconnectOnMsg bool   `json:"reconnectOnMsg"`
    Reconnect      bool   `json:"reconnect"`
    Net            string `json:"net"`
@@ -98,7 +98,7 @@ func (log *ConnLogger) Init(jsonconfig string) error {
    if err != nil {
        return err
    }
-   log.createLogger(&connWriter{
+   log.NewWriterLogger(&connWriter{
        ReconnectOnMsg: log.ReconnectOnMsg,
        Reconnect:      log.Reconnect,
        Net:            log.Net,

@@ -34,14 +34,14 @@ func (n *nopWriteCloser) Close() error {
 // ConsoleLogger implements LoggerProvider and writes messages to terminal.
 type ConsoleLogger struct {
-   BaseLogger
+   WriterLogger
    Stderr bool `json:"stderr"`
 }
 
 // NewConsoleLogger create ConsoleLogger returning as LoggerProvider.
 func NewConsoleLogger() LoggerProvider {
    log := &ConsoleLogger{}
-   log.createLogger(&nopWriteCloser{
+   log.NewWriterLogger(&nopWriteCloser{
        w: os.Stdout,
    })
    return log
@@ -55,11 +55,11 @@ func (log *ConsoleLogger) Init(config string) error {
        return err
    }
    if log.Stderr {
-       log.createLogger(&nopWriteCloser{
+       log.NewWriterLogger(&nopWriteCloser{
            w: os.Stderr,
        })
    } else {
-       log.createLogger(log.out)
+       log.NewWriterLogger(log.out)
    }
    return nil
 }

@@ -20,7 +20,7 @@ import (
 // FileLogger implements LoggerProvider.
 // It writes messages by lines limit, file size limit, or time frequency.
 type FileLogger struct {
-   BaseLogger
+   WriterLogger
    mw *MuxWriter
    // The opened file
    Filename string `json:"filename"`
@@ -106,7 +106,7 @@ func (log *FileLogger) Init(config string) error {
        return errors.New("config must have filename")
    }
    // set MuxWriter as Logger's io.Writer
-   log.createLogger(log.mw)
+   log.NewWriterLogger(log.mw)
    return log.StartLogger()
 }

@@ -89,7 +89,6 @@ func TestFileLogger(t *testing.T) {
    assert.Equal(t, expected, string(logData))
 
    event.level = DEBUG
-   expected = expected + ""
    fileLogger.LogEvent(&event)
    fileLogger.Flush()
    logData, err = ioutil.ReadFile(filename)
@@ -97,7 +96,6 @@ func TestFileLogger(t *testing.T) {
    assert.Equal(t, expected, string(logData))
 
    event.level = TRACE
-   expected = expected + ""
    fileLogger.LogEvent(&event)
    fileLogger.Flush()
    logData, err = ioutil.ReadFile(filename)

@@ -0,0 +1,64 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package log
import "strings"
// These flags define which text to prefix to each log entry generated
// by the Logger. Bits are or'ed together to control what's printed.
// There is no control over the order they appear (the order listed
// here) or the format they present (as described in the comments).
// The prefix is followed by a colon only if more than time is stated
// is specified. For example, flags Ldate | Ltime
// produce, 2009/01/23 01:23:23 message.
// The standard is:
// 2009/01/23 01:23:23 ...a/logger/c/d.go:23:runtime.Caller() [I]: message
const (
Ldate = 1 << iota // the date in the local time zone: 2009/01/23
Ltime // the time in the local time zone: 01:23:23
Lmicroseconds // microsecond resolution: 01:23:23.123123. assumes Ltime.
Llongfile // full file name and line number: /a/logger/c/d.go:23
Lshortfile // final file name element and line number: d.go:23. overrides Llongfile
Lfuncname // function name of the caller: runtime.Caller()
Lshortfuncname // last part of the function name
LUTC // if Ldate or Ltime is set, use UTC rather than the local time zone
Llevelinitial // Initial character of the provided level in brackets eg. [I] for info
Llevel // Provided level in brackets [INFO]
// Last 20 characters of the filename
Lmedfile = Lshortfile | Llongfile
// LstdFlags is the initial value for the standard logger
LstdFlags = Ldate | Ltime | Lmedfile | Lshortfuncname | Llevelinitial
)
var flagFromString = map[string]int{
"none": 0,
"date": Ldate,
"time": Ltime,
"microseconds": Lmicroseconds,
"longfile": Llongfile,
"shortfile": Lshortfile,
"funcname": Lfuncname,
"shortfuncname": Lshortfuncname,
"utc": LUTC,
"levelinitial": Llevelinitial,
"level": Llevel,
"medfile": Lmedfile,
"stdflags": LstdFlags,
}
// FlagsFromString takes a comma separated list of flags and returns
// the flags for this string
func FlagsFromString(from string) int {
flags := 0
for _, flag := range strings.Split(strings.ToLower(from), ",") {
f, ok := flagFromString[strings.TrimSpace(flag)]
if ok {
flags = flags | f
}
}
return flags
}
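For reference, the comma-separated strings consumed by FlagsFromString map straight onto these constants; matching is case-insensitive and unknown names are silently ignored. A quick sketch:

package main

import (
    "fmt"

    "code.gitea.io/gitea/modules/log"
)

func main() {
    // Mixed case and surrounding spaces are fine; each name ORs in one flag.
    flags := log.FlagsFromString("Date, Time, ShortFile, LevelInitial")
    fmt.Println(flags == log.Ldate|log.Ltime|log.Lshortfile|log.Llevelinitial) // true
}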

@@ -31,7 +31,7 @@ func (s *smtpWriter) Close() error {
 // SMTPLogger implements LoggerProvider and is used to send emails via given SMTP-server.
 type SMTPLogger struct {
-   BaseLogger
+   WriterLogger
    Username string `json:"Username"`
    Password string `json:"password"`
    Host     string `json:"host"`
@@ -63,7 +63,7 @@ func (log *SMTPLogger) Init(jsonconfig string) error {
    if err != nil {
        return err
    }
-   log.createLogger(&smtpWriter{
+   log.NewWriterLogger(&smtpWriter{
        owner: log,
    })
    log.sendMailFn = smtp.SendMail

@@ -0,0 +1,273 @@
// Copyright 2019 The Gitea Authors. All rights reserved.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package log
import (
"bytes"
"fmt"
"io"
"regexp"
"strings"
"sync"
)
type byteArrayWriter []byte
func (b *byteArrayWriter) Write(p []byte) (int, error) {
*b = append(*b, p...)
return len(p), nil
}
// WriterLogger represent a basic logger for Gitea
type WriterLogger struct {
out io.WriteCloser
mu sync.Mutex
Level Level `json:"level"`
StacktraceLevel Level `json:"stacktraceLevel"`
Flags int `json:"flags"`
Prefix string `json:"prefix"`
Colorize bool `json:"colorize"`
Expression string `json:"expression"`
regexp *regexp.Regexp
}
// NewWriterLogger creates a new WriterLogger from the provided WriteCloser.
// Optionally the level can be changed at the same time.
func (logger *WriterLogger) NewWriterLogger(out io.WriteCloser, level ...Level) {
logger.mu.Lock()
defer logger.mu.Unlock()
logger.out = out
switch logger.Flags {
case 0:
logger.Flags = LstdFlags
case -1:
logger.Flags = 0
}
if len(level) > 0 {
logger.Level = level[0]
}
logger.createExpression()
}
func (logger *WriterLogger) createExpression() {
if len(logger.Expression) > 0 {
var err error
logger.regexp, err = regexp.Compile(logger.Expression)
if err != nil {
logger.regexp = nil
}
}
}
// GetLevel returns the logging level for this logger
func (logger *WriterLogger) GetLevel() Level {
return logger.Level
}
// GetStacktraceLevel returns the stacktrace logging level for this logger
func (logger *WriterLogger) GetStacktraceLevel() Level {
return logger.StacktraceLevel
}
// Copy of cheap integer to fixed-width decimal to ascii from logger.
func itoa(buf *[]byte, i int, wid int) {
var logger [20]byte
bp := len(logger) - 1
for i >= 10 || wid > 1 {
wid--
q := i / 10
logger[bp] = byte('0' + i - q*10)
bp--
i = q
}
// i < 10
logger[bp] = byte('0' + i)
*buf = append(*buf, logger[bp:]...)
}
func (logger *WriterLogger) createMsg(buf *[]byte, event *Event) {
*buf = append(*buf, logger.Prefix...)
t := event.time
if logger.Flags&(Ldate|Ltime|Lmicroseconds) != 0 {
if logger.Colorize {
*buf = append(*buf, fgCyanBytes...)
}
if logger.Flags&LUTC != 0 {
t = t.UTC()
}
if logger.Flags&Ldate != 0 {
year, month, day := t.Date()
itoa(buf, year, 4)
*buf = append(*buf, '/')
itoa(buf, int(month), 2)
*buf = append(*buf, '/')
itoa(buf, day, 2)
*buf = append(*buf, ' ')
}
if logger.Flags&(Ltime|Lmicroseconds) != 0 {
hour, min, sec := t.Clock()
itoa(buf, hour, 2)
*buf = append(*buf, ':')
itoa(buf, min, 2)
*buf = append(*buf, ':')
itoa(buf, sec, 2)
if logger.Flags&Lmicroseconds != 0 {
*buf = append(*buf, '.')
itoa(buf, t.Nanosecond()/1e3, 6)
}
*buf = append(*buf, ' ')
}
if logger.Colorize {
*buf = append(*buf, resetBytes...)
}
}
if logger.Flags&(Lshortfile|Llongfile) != 0 {
if logger.Colorize {
*buf = append(*buf, fgGreenBytes...)
}
file := event.filename
if logger.Flags&Lmedfile == Lmedfile {
startIndex := len(file) - 20
if startIndex > 0 {
file = "..." + file[startIndex:]
}
} else if logger.Flags&Lshortfile != 0 {
startIndex := strings.LastIndexByte(file, '/')
if startIndex > 0 && startIndex < len(file) {
file = file[startIndex+1:]
}
}
*buf = append(*buf, file...)
*buf = append(*buf, ':')
itoa(buf, event.line, -1)
if logger.Flags&(Lfuncname|Lshortfuncname) != 0 {
*buf = append(*buf, ':')
} else {
if logger.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
}
if logger.Flags&(Lfuncname|Lshortfuncname) != 0 {
if logger.Colorize {
*buf = append(*buf, fgGreenBytes...)
}
funcname := event.caller
if logger.Flags&Lshortfuncname != 0 {
lastIndex := strings.LastIndexByte(funcname, '.')
if lastIndex > 0 && len(funcname) > lastIndex+1 {
funcname = funcname[lastIndex+1:]
}
}
*buf = append(*buf, funcname...)
if logger.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
if logger.Flags&(Llevel|Llevelinitial) != 0 {
level := strings.ToUpper(event.level.String())
if logger.Colorize {
*buf = append(*buf, levelToColor[event.level]...)
}
*buf = append(*buf, '[')
if logger.Flags&Llevelinitial != 0 {
*buf = append(*buf, level[0])
} else {
*buf = append(*buf, level...)
}
*buf = append(*buf, ']')
if logger.Colorize {
*buf = append(*buf, resetBytes...)
}
*buf = append(*buf, ' ')
}
var msg = []byte(event.msg)
if len(msg) > 0 && msg[len(msg)-1] == '\n' {
msg = msg[:len(msg)-1]
}
pawMode := allowColor
if !logger.Colorize {
pawMode = removeColor
}
baw := byteArrayWriter(*buf)
(&protectedANSIWriter{
w: &baw,
mode: pawMode,
}).Write([]byte(msg))
*buf = baw
if event.stacktrace != "" && logger.StacktraceLevel <= event.level {
lines := bytes.Split([]byte(event.stacktrace), []byte("\n"))
if len(lines) > 1 {
for _, line := range lines {
*buf = append(*buf, "\n\t"...)
*buf = append(*buf, line...)
}
}
*buf = append(*buf, '\n')
}
*buf = append(*buf, '\n')
}
// LogEvent logs the event to the internal writer
func (logger *WriterLogger) LogEvent(event *Event) error {
if logger.Level > event.level {
return nil
}
logger.mu.Lock()
defer logger.mu.Unlock()
if !logger.Match(event) {
return nil
}
var buf []byte
logger.createMsg(&buf, event)
_, err := logger.out.Write(buf)
return err
}
// Match checks if the given event matches the logger's regexp expression
func (logger *WriterLogger) Match(event *Event) bool {
if logger.regexp == nil {
return true
}
if logger.regexp.Match([]byte(fmt.Sprintf("%s:%d:%s", event.filename, event.line, event.caller))) {
return true
}
// Match on the non-colored msg - therefore strip out colors
var msg []byte
baw := byteArrayWriter(msg)
(&protectedANSIWriter{
w: &baw,
mode: removeColor,
}).Write([]byte(event.msg))
msg = baw
if logger.regexp.Match(msg) {
return true
}
return false
}
// Close the base logger
func (logger *WriterLogger) Close() {
logger.mu.Lock()
defer logger.mu.Unlock()
if logger.out != nil {
logger.out.Close()
}
}
// GetName returns empty for these provider loggers
func (logger *WriterLogger) GetName() string {
return ""
}
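Any provider can now be built the same way ConsoleLogger, FileLogger and the integration TestLogger are: embed WriterLogger, unmarshal the JSON config into it, then hand it a WriteCloser via NewWriterLogger. A sketch of a hypothetical provider, not part of this PR:

package log_example

import (
    "encoding/json"
    "os"

    "code.gitea.io/gitea/modules/log"
)

// StdoutLogger is a toy LoggerProvider: the embedded WriterLogger supplies
// all of the formatting, level filtering and regexp matching shown above,
// so only the output writer has to be provided here.
type StdoutLogger struct {
    log.WriterLogger
}

// nopCloser stops Close from actually closing os.Stdout.
type nopCloser struct{ *os.File }

func (nopCloser) Close() error { return nil }

// NewStdoutLogger mirrors the NewTestLogger/NewConsoleLogger constructors.
func NewStdoutLogger() log.LoggerProvider {
    l := &StdoutLogger{}
    l.Level = log.INFO // default until the JSON config overrides it
    return l
}

// Init unmarshals the JSON config (level, flags, expression, ...) into the
// embedded WriterLogger and then attaches the output writer.
func (l *StdoutLogger) Init(config string) error {
    if err := json.Unmarshal([]byte(config), l); err != nil {
        return err
    }
    l.NewWriterLogger(nopCloser{os.Stdout})
    return nil
}

// Flush satisfies LoggerProvider; nothing is buffered here.
func (l *StdoutLogger) Flush() {}

// GetName is the mode name that would appear in [log] MODE = ...
func (l *StdoutLogger) GetName() string { return "stdout-example" }

func init() {
    // Hypothetical registration, mirroring log.Register("test", NewTestLogger).
    log.Register("stdout-example", NewStdoutLogger)
}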

@@ -38,7 +38,7 @@ func TestBaseLogger(t *testing.T) {
        },
    }
    prefix := "TestPrefix "
-   b := BaseLogger{
+   b := WriterLogger{
        out:   c,
        Level: INFO,
        Flags: LstdFlags | LUTC,
@@ -115,7 +115,7 @@ func TestBaseLoggerDated(t *testing.T) {
        },
    }
    prefix := ""
-   b := BaseLogger{
+   b := WriterLogger{
        out:   c,
        Level: WARN,
        Flags: Ldate | Ltime | Lmicroseconds | Lshortfile | Llevel,
@@ -195,14 +195,14 @@ func TestBaseLoggerMultiLineNoFlagsRegexp(t *testing.T) {
        },
    }
    prefix := ""
-   b := BaseLogger{
+   b := WriterLogger{
        Level:           DEBUG,
        StacktraceLevel: ERROR,
        Flags:           -1,
        Prefix:          prefix,
        Expression:      "FILENAME",
    }
-   b.createLogger(c)
+   b.NewWriterLogger(c)
 
    location, _ := time.LoadLocation("EST")
@@ -263,14 +263,14 @@ func TestBrokenRegexp(t *testing.T) {
        },
    }
 
-   b := BaseLogger{
+   b := WriterLogger{
        Level:           DEBUG,
        StacktraceLevel: ERROR,
        Flags:           -1,
        Prefix:          prefix,
        Expression:      "\\",
    }
-   b.createLogger(c)
+   b.NewWriterLogger(c)
    assert.Empty(t, b.regexp)
    b.Close()
    assert.Equal(t, true, closed)

@@ -273,6 +273,15 @@ func newLogService() {
    golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))
 }
 
+// NewLogServices creates all the log services
+func NewLogServices(disableConsole bool) {
+   newLogService()
+   newMacaronLogService()
+   newRouterLogService()
+   newAccessLogService()
+   NewXORMLogService(disableConsole)
+}
+
 // NewXORMLogService initializes xorm logger service
 func NewXORMLogService(disableConsole bool) {
    EnableXORMLog = Cfg.Section("log").Key("ENABLE_XORM_LOG").MustBool(true)
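NewLogServices bundles the individual log service initialisers so that both call sites stay in sync; the only difference between them is whether a console copy of the XORM/SQL log is wanted. setting.NewServices() keeps passing false, while initMigrationTest passes true so that SQL output stays off the console and can still be inspected in the file logs.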

@@ -982,11 +982,7 @@ func loadOrGenerateInternalToken(sec *ini.Section) string {
 // NewServices initializes the services
 func NewServices() {
    newService()
-   newLogService()
-   newMacaronLogService()
-   newAccessLogService()
-   newRouterLogService()
-   NewXORMLogService(false)
+   NewLogServices(false)
    newCacheService()
    newSessionService()
    newMailService()
