Multiple Gitea Doctor improvements (#10943)

* Add `gitea doctor --list` flag to list the available checks and show which of them run by default
* Add `gitea doctor --run` to run specific checks
* Add `gitea doctor --all` to run all checks
* Add db version checker
* Add non-default recalculate merge bases check/fixer to doctor
* Add hook checker (Fix #9878) and ensure hooks are executable (Fix #6319)
* Fix the authorized_keys checker - slight change of functionality here: parsing the command in each line is fragile, so we now just check whether the authorized_keys file is essentially the same as what Gitea would produce. (This is still not perfect, as order matters - we should probably just md5sum the two files.)
* Add SCRIPT_TYPE check (Fix #10977)
* Add `gitea doctor --fix` to attempt to fix what is possible to easily fix
* Add `gitea doctor --log-file` to set the log file - either a real file, stdout (`-`), or disabled completely (`""`). (Fixes a previously undetected bug with certain xorm logging configurations - see @6543's comment.) Example invocations of the new flags follow below.
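A rough sketch of how the new flags might be combined (illustrative only; the check names come from the checklist added in the diff below):

    # list the registered checks and show which of them run by default
    gitea doctor --list

    # run two named checks and let the doctor fix what it can
    gitea doctor --run hooks --run authorized_keys --fix

    # run every available check and send the doctor log to stdout
    gitea doctor --all --log-file=-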

Signed-off-by: Andrew Thornton <art27@cantab.net>
zeripath 2020-04-06 11:44:47 +01:00 committed by GitHub
parent 1648bf2b60
commit d26885e2bf
4 changed files with 481 additions and 73 deletions


@@ -6,18 +6,25 @@ package cmd
  import (
  "bufio"
- "errors"
+ "bytes"
+ "context"
  "fmt"
  "io/ioutil"
+ golog "log"
  "os"
  "os/exec"
  "path/filepath"
- "regexp"
  "strings"
+ "text/tabwriter"
+ "code.gitea.io/gitea/models"
+ "code.gitea.io/gitea/models/migrations"
+ "code.gitea.io/gitea/modules/git"
  "code.gitea.io/gitea/modules/log"
  "code.gitea.io/gitea/modules/options"
+ "code.gitea.io/gitea/modules/repository"
  "code.gitea.io/gitea/modules/setting"
+ "xorm.io/builder"
  "github.com/urfave/cli"
  )
@@ -28,10 +35,38 @@ var CmdDoctor = cli.Command{
  Usage: "Diagnose problems",
  Description: "A command to diagnose problems with the current Gitea instance according to the given configuration.",
  Action: runDoctor,
+ Flags: []cli.Flag{
+ cli.BoolFlag{
+ Name: "list",
+ Usage: "List the available checks",
+ },
+ cli.BoolFlag{
+ Name: "default",
+ Usage: "Run the default checks (if neither --run or --all is set, this is the default behaviour)",
+ },
+ cli.StringSliceFlag{
+ Name: "run",
+ Usage: "Run the provided checks - (if --default is set, the default checks will also run)",
+ },
+ cli.BoolFlag{
+ Name: "all",
+ Usage: "Run all the available checks",
+ },
+ cli.BoolFlag{
+ Name: "fix",
+ Usage: "Automatically fix what we can",
+ },
+ cli.StringFlag{
+ Name: "log-file",
+ Usage: `Name of the log file (default: "doctor.log"). Set to "-" to output to stdout, set to "" to disable`,
+ },
+ },
  }
  type check struct {
  title string
+ name string
+ isDefault bool
  f func(ctx *cli.Context) ([]string, error)
  abortIfFailed bool
  skipDatabaseInit bool
@@ -42,30 +77,124 @@ var checklist = []check{
  {
  // NOTE: this check should be the first in the list
  title: "Check paths and basic configuration",
+ name: "paths",
+ isDefault: true,
  f: runDoctorPathInfo,
  abortIfFailed: true,
  skipDatabaseInit: true,
  },
  {
- title: "Check if OpenSSH authorized_keys file id correct",
- f: runDoctorLocationMoved,
+ title: "Check Database Version",
+ name: "check-db",
+ isDefault: true,
+ f: runDoctorCheckDBVersion,
+ abortIfFailed: true,
+ },
+ {
+ title: "Check if OpenSSH authorized_keys file is up-to-date",
+ name: "authorized_keys",
+ isDefault: true,
+ f: runDoctorAuthorizedKeys,
+ },
+ {
+ title: "Check if SCRIPT_TYPE is available",
+ name: "script-type",
+ isDefault: false,
+ f: runDoctorScriptType,
+ },
+ {
+ title: "Check if hook files are up-to-date and executable",
+ name: "hooks",
+ isDefault: false,
+ f: runDoctorHooks,
+ },
+ {
+ title: "Recalculate merge bases",
+ name: "recalculate_merge_bases",
+ isDefault: false,
+ f: runDoctorPRMergeBase,
  },
  // more checks please append here
  }
  func runDoctor(ctx *cli.Context) error {
- // Silence the console logger
- // TODO: Redirect all logs into `doctor.log` ignoring any other log configuration
+ // Silence the default loggers
  log.DelNamedLogger("console")
  log.DelNamedLogger(log.DEFAULT)
+ // Now setup our own
+ logFile := ctx.String("log-file")
+ if !ctx.IsSet("log-file") {
+ logFile = "doctor.log"
+ }
+ if len(logFile) == 0 {
+ log.NewLogger(1000, "doctor", "console", `{"level":"NONE","stacktracelevel":"NONE","colorize":"%t"}`)
+ } else if logFile == "-" {
+ log.NewLogger(1000, "doctor", "console", `{"level":"trace","stacktracelevel":"NONE"}`)
+ } else {
+ log.NewLogger(1000, "doctor", "file", fmt.Sprintf(`{"filename":%q,"level":"trace","stacktracelevel":"NONE"}`, logFile))
+ }
+ // Finally redirect the default golog to here
+ golog.SetFlags(0)
+ golog.SetPrefix("")
+ golog.SetOutput(log.NewLoggerAsWriter("INFO", log.GetLogger(log.DEFAULT)))
+ if ctx.IsSet("list") {
+ w := tabwriter.NewWriter(os.Stdout, 0, 8, 0, '\t', 0)
+ _, _ = w.Write([]byte("Default\tName\tTitle\n"))
+ for _, check := range checklist {
+ if check.isDefault {
+ _, _ = w.Write([]byte{'*'})
+ }
+ _, _ = w.Write([]byte{'\t'})
+ _, _ = w.Write([]byte(check.name))
+ _, _ = w.Write([]byte{'\t'})
+ _, _ = w.Write([]byte(check.title))
+ _, _ = w.Write([]byte{'\n'})
+ }
+ return w.Flush()
+ }
+ var checks []check
+ if ctx.Bool("all") {
+ checks = checklist
+ } else if ctx.IsSet("run") {
+ addDefault := ctx.Bool("default")
+ names := ctx.StringSlice("run")
+ for i, name := range names {
+ names[i] = strings.ToLower(strings.TrimSpace(name))
+ }
+ for _, check := range checklist {
+ if addDefault && check.isDefault {
+ checks = append(checks, check)
+ continue
+ }
+ for _, name := range names {
+ if name == check.name {
+ checks = append(checks, check)
+ break
+ }
+ }
+ }
+ } else {
+ for _, check := range checklist {
+ if check.isDefault {
+ checks = append(checks, check)
+ }
+ }
+ }
  dbIsInit := false
- for i, check := range checklist {
+ for i, check := range checks {
  if !dbIsInit && !check.skipDatabaseInit {
  // Only open database after the most basic configuration check
- if err := initDB(); err != nil {
+ setting.EnableXORMLog = false
+ if err := initDBDisableConsole(true); err != nil {
  fmt.Println(err)
  fmt.Println("Check if you are using the right config file. You can use a --config directive to specify one.")
  return nil
@@ -90,14 +219,6 @@ func runDoctor(ctx *cli.Context) error {
  return nil
  }
- func exePath() (string, error) {
- file, err := exec.LookPath(os.Args[0])
- if err != nil {
- return "", err
- }
- return filepath.Abs(file)
- }
  func runDoctorPathInfo(ctx *cli.Context) ([]string, error) {
  res := make([]string, 0, 10)
@@ -115,16 +236,31 @@ func runDoctorPathInfo(ctx *cli.Context) ([]string, error) {
  check := func(name, path string, is_dir, required, is_write bool) {
  res = append(res, fmt.Sprintf("%-25s '%s'", name+":", path))
- if fi, err := os.Stat(path); err != nil {
+ fi, err := os.Stat(path)
+ if err != nil {
+ if os.IsNotExist(err) && ctx.Bool("fix") && is_dir {
+ if err := os.MkdirAll(path, 0777); err != nil {
+ res = append(res, fmt.Sprintf(" ERROR: %v", err))
+ fail = true
+ return
+ }
+ fi, err = os.Stat(path)
+ }
+ }
+ if err != nil {
  if required {
  res = append(res, fmt.Sprintf(" ERROR: %v", err))
  fail = true
- } else {
- res = append(res, fmt.Sprintf(" NOTICE: not accessible (%v)", err))
+ return
  }
- } else if is_dir && !fi.IsDir() {
+ res = append(res, fmt.Sprintf(" NOTICE: not accessible (%v)", err))
+ return
+ }
+ if is_dir && !fi.IsDir() {
  res = append(res, " ERROR: not a directory")
  fail = true
+ return
  } else if !is_dir && !fi.Mode().IsRegular() {
  res = append(res, " ERROR: not a regular file")
  fail = true
@@ -171,7 +307,9 @@ func runDoctorWritableDir(path string) error {
  return nil
  }
- func runDoctorLocationMoved(ctx *cli.Context) ([]string, error) {
+ const tplCommentPrefix = `# gitea public key`
+ func runDoctorAuthorizedKeys(ctx *cli.Context) ([]string, error) {
  if setting.SSH.StartBuiltinServer || !setting.SSH.CreateAuthorizedKeysFile {
  return nil, nil
  }
@@ -179,49 +317,178 @@ func runDoctorLocationMoved(ctx *cli.Context) ([]string, error) {
  fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
  f, err := os.Open(fPath)
  if err != nil {
+ if ctx.Bool("fix") {
+ return []string{fmt.Sprintf("Error whilst opening authorized_keys: %v. Attempting regeneration", err)}, models.RewriteAllPublicKeys()
+ }
  return nil, err
  }
  defer f.Close()
- var firstline string
+ linesInAuthorizedKeys := map[string]bool{}
  scanner := bufio.NewScanner(f)
  for scanner.Scan() {
- firstline = strings.TrimSpace(scanner.Text())
- if len(firstline) == 0 || firstline[0] == '#' {
+ line := scanner.Text()
+ if strings.HasPrefix(line, tplCommentPrefix) {
  continue
  }
- break
+ linesInAuthorizedKeys[line] = true
  }
+ f.Close()
- // command="/Volumes/data/Projects/gitea/gitea/gitea --config
- if len(firstline) > 0 {
- exp := regexp.MustCompile(`^[ \t]*(?:command=")([^ ]+) --config='([^']+)' serv key-([^"]+)",(?:[^ ]+) ssh-rsa ([^ ]+) ([^ ]+)[ \t]*$`)
- // command="/home/user/gitea --config='/home/user/etc/app.ini' serv key-999",option-1,option-2,option-n ssh-rsa public-key-value key-name
- res := exp.FindStringSubmatch(firstline)
- if res == nil {
- return nil, errors.New("Unknow authorized_keys format")
- }
- giteaPath := res[1] // => /home/user/gitea
- iniPath := res[2] // => /home/user/etc/app.ini
- p, err := exePath()
- if err != nil {
- return nil, err
- }
- p, err = filepath.Abs(p)
- if err != nil {
- return nil, err
- }
- if len(giteaPath) > 0 && giteaPath != p {
- return []string{fmt.Sprintf("Gitea exe path wants %s but %s on %s", p, giteaPath, fPath)}, nil
- }
- if len(iniPath) > 0 && iniPath != setting.CustomConf {
- return []string{fmt.Sprintf("Gitea config path wants %s but %s on %s", setting.CustomConf, iniPath, fPath)}, nil
- }
- }
+ // now we regenerate and check if there are any lines missing
+ regenerated := &bytes.Buffer{}
+ if err := models.RegeneratePublicKeys(regenerated); err != nil {
+ return nil, err
+ }
+ scanner = bufio.NewScanner(regenerated)
+ for scanner.Scan() {
+ line := scanner.Text()
+ if strings.HasPrefix(line, tplCommentPrefix) {
+ continue
+ }
+ if ok := linesInAuthorizedKeys[line]; ok {
+ continue
+ }
+ if ctx.Bool("fix") {
+ return []string{"authorized_keys is out of date, attempting regeneration"}, models.RewriteAllPublicKeys()
+ }
+ return []string{"authorized_keys is out of date and should be regenerated with gitea admin regenerate keys"}, nil
+ }
  return nil, nil
  }
+ func runDoctorCheckDBVersion(ctx *cli.Context) ([]string, error) {
+ if err := models.NewEngine(context.Background(), migrations.EnsureUpToDate); err != nil {
+ if ctx.Bool("fix") {
+ return []string{fmt.Sprintf("WARN: Got Error %v during ensure up to date", err), "Attempting to migrate to the latest DB version to fix this."}, models.NewEngine(context.Background(), migrations.Migrate)
+ }
+ return nil, err
+ }
+ return nil, nil
+ }
+ func iterateRepositories(each func(*models.Repository) ([]string, error)) ([]string, error) {
+ results := []string{}
+ err := models.Iterate(
+ models.DefaultDBContext(),
+ new(models.Repository),
+ builder.Gt{"id": 0},
+ func(idx int, bean interface{}) error {
+ res, err := each(bean.(*models.Repository))
+ results = append(results, res...)
+ return err
+ },
+ )
+ return results, err
+ }
+ func iteratePRs(repo *models.Repository, each func(*models.Repository, *models.PullRequest) ([]string, error)) ([]string, error) {
+ results := []string{}
+ err := models.Iterate(
+ models.DefaultDBContext(),
+ new(models.PullRequest),
+ builder.Eq{"base_repo_id": repo.ID},
+ func(idx int, bean interface{}) error {
+ res, err := each(repo, bean.(*models.PullRequest))
+ results = append(results, res...)
+ return err
+ },
+ )
+ return results, err
+ }
+ func runDoctorHooks(ctx *cli.Context) ([]string, error) {
+ // Need to iterate across all of the repositories
+ return iterateRepositories(func(repo *models.Repository) ([]string, error) {
+ results, err := repository.CheckDelegateHooks(repo.RepoPath())
+ if err != nil {
+ return nil, err
+ }
+ if len(results) > 0 && ctx.Bool("fix") {
+ return []string{fmt.Sprintf("regenerated hooks for %s", repo.FullName())}, repository.CreateDelegateHooks(repo.RepoPath())
+ }
+ return results, nil
+ })
+ }
+ func runDoctorPRMergeBase(ctx *cli.Context) ([]string, error) {
+ numRepos := 0
+ numPRs := 0
+ numPRsUpdated := 0
+ results, err := iterateRepositories(func(repo *models.Repository) ([]string, error) {
+ numRepos++
+ return iteratePRs(repo, func(repo *models.Repository, pr *models.PullRequest) ([]string, error) {
+ numPRs++
+ results := []string{}
+ pr.BaseRepo = repo
+ repoPath := repo.RepoPath()
+ oldMergeBase := pr.MergeBase
+ if !pr.HasMerged {
+ var err error
+ pr.MergeBase, err = git.NewCommand("merge-base", "--", pr.BaseBranch, pr.GetGitRefName()).RunInDir(repoPath)
+ if err != nil {
+ var err2 error
+ pr.MergeBase, err2 = git.NewCommand("rev-parse", git.BranchPrefix+pr.BaseBranch).RunInDir(repoPath)
+ if err2 != nil {
+ results = append(results, fmt.Sprintf("WARN: Unable to get merge base for PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
+ log.Error("Unable to get merge base for PR ID %d, Index %d in %s/%s. Error: %v & %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err, err2)
+ return results, nil
+ }
+ }
+ } else {
+ parentsString, err := git.NewCommand("rev-list", "--parents", "-n", "1", pr.MergedCommitID).RunInDir(repoPath)
+ if err != nil {
+ results = append(results, fmt.Sprintf("WARN: Unable to get parents for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
+ log.Error("Unable to get parents for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
+ return results, nil
+ }
+ parents := strings.Split(strings.TrimSpace(parentsString), " ")
+ if len(parents) < 2 {
+ return results, nil
+ }
+ args := append([]string{"merge-base", "--"}, parents[1:]...)
+ args = append(args, pr.GetGitRefName())
+ pr.MergeBase, err = git.NewCommand(args...).RunInDir(repoPath)
+ if err != nil {
+ results = append(results, fmt.Sprintf("WARN: Unable to get merge base for merged PR ID %d, #%d onto %s in %s/%s", pr.ID, pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name))
+ log.Error("Unable to get merge base for merged PR ID %d, Index %d in %s/%s. Error: %v", pr.ID, pr.Index, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, err)
+ return results, nil
+ }
+ }
+ pr.MergeBase = strings.TrimSpace(pr.MergeBase)
+ if pr.MergeBase != oldMergeBase {
+ if ctx.Bool("fix") {
+ if err := pr.UpdateCols("merge_base"); err != nil {
+ return results, err
+ }
+ } else {
+ results = append(results, fmt.Sprintf("#%d onto %s in %s/%s: MergeBase should be %s but is %s", pr.Index, pr.BaseBranch, pr.BaseRepo.OwnerName, pr.BaseRepo.Name, oldMergeBase, pr.MergeBase))
+ }
+ numPRsUpdated++
+ }
+ return results, nil
+ })
+ })
+ if ctx.Bool("fix") {
+ results = append(results, fmt.Sprintf("%d PR mergebases updated of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
+ } else {
+ results = append(results, fmt.Sprintf("%d PRs with incorrect mergebases of %d PRs total in %d repos", numPRsUpdated, numPRs, numRepos))
+ }
+ return results, err
+ }
+ func runDoctorScriptType(ctx *cli.Context) ([]string, error) {
+ path, err := exec.LookPath(setting.ScriptType)
+ if err != nil {
+ return []string{fmt.Sprintf("ScriptType %s is not on the current PATH", setting.ScriptType)}, err
+ }
+ return []string{fmt.Sprintf("ScriptType %s is on the current PATH at %s", setting.ScriptType, path)}, nil
+ }


@@ -206,6 +206,52 @@ var migrations = []Migration{
  NewMigration("Add OrgID column to Labels table", addOrgIDLabelColumn),
  }
+ // GetCurrentDBVersion returns the current db version
+ func GetCurrentDBVersion(x *xorm.Engine) (int64, error) {
+ if err := x.Sync(new(Version)); err != nil {
+ return -1, fmt.Errorf("sync: %v", err)
+ }
+ currentVersion := &Version{ID: 1}
+ has, err := x.Get(currentVersion)
+ if err != nil {
+ return -1, fmt.Errorf("get: %v", err)
+ }
+ if !has {
+ return -1, nil
+ }
+ return currentVersion.Version, nil
+ }
+ // ExpectedVersion returns the expected db version
+ func ExpectedVersion() int64 {
+ return int64(minDBVersion + len(migrations))
+ }
+ // EnsureUpToDate will check if the db is at the correct version
+ func EnsureUpToDate(x *xorm.Engine) error {
+ currentDB, err := GetCurrentDBVersion(x)
+ if err != nil {
+ return err
+ }
+ if currentDB < 0 {
+ return fmt.Errorf("Database has not been initialised")
+ }
+ if minDBVersion > currentDB {
+ return fmt.Errorf("DB version %d (<= %d) is too old for auto-migration. Upgrade to Gitea 1.6.4 first then upgrade to this version", currentDB, minDBVersion)
+ }
+ expected := ExpectedVersion()
+ if currentDB != expected {
+ return fmt.Errorf(`Current database version %d is not equal to the expected version %d. Please run "gitea [--config /path/to/app.ini] migrate" to update the database version`, currentDB, expected)
+ }
+ return nil
+ }
  // Migrate database to current version
  func Migrate(x *xorm.Engine) error {
  if err := x.Sync(new(Version)); err != nil {


@@ -15,6 +15,7 @@ import (
  "encoding/pem"
  "errors"
  "fmt"
+ "io"
  "io/ioutil"
  "math/big"
  "os"
@@ -701,7 +702,21 @@ func rewriteAllPublicKeys(e Engine) error {
  }
  }
- err = e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
+ if err := regeneratePublicKeys(e, t); err != nil {
+ return err
+ }
+ t.Close()
+ return os.Rename(tmpPath, fPath)
+ }
+ // RegeneratePublicKeys regenerates the authorized_keys file
+ func RegeneratePublicKeys(t io.StringWriter) error {
+ return regeneratePublicKeys(x, t)
+ }
+ func regeneratePublicKeys(e Engine, t io.StringWriter) error {
+ err := e.Iterate(new(PublicKey), func(idx int, bean interface{}) (err error) {
  _, err = t.WriteString((bean.(*PublicKey)).AuthorizedString())
  return err
  })
@@ -709,6 +724,7 @@ func rewriteAllPublicKeys(e Engine) error {
  return err
  }
+ fPath := filepath.Join(setting.SSH.RootPath, "authorized_keys")
  if com.IsExist(fPath) {
  f, err := os.Open(fPath)
  if err != nil {
@@ -729,9 +745,7 @@ func rewriteAllPublicKeys(e Engine) error {
  }
  f.Close()
  }
- t.Close()
- return os.Rename(tmpPath, fPath)
+ return nil
  }
  // ________ .__ ____ __.


@@ -14,10 +14,26 @@ import (
  "code.gitea.io/gitea/models"
  "code.gitea.io/gitea/modules/log"
  "code.gitea.io/gitea/modules/setting"
+ "github.com/unknwon/com"
  "xorm.io/builder"
  )
+ func getHookTemplates() (hookNames, hookTpls, giteaHookTpls []string) {
+ hookNames = []string{"pre-receive", "update", "post-receive"}
+ hookTpls = []string{
+ fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
+ fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
+ fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
+ }
+ giteaHookTpls = []string{
+ fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
+ fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
+ fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
+ }
+ return
+ }
  // CreateDelegateHooks creates all the hooks scripts for the repo
  func CreateDelegateHooks(repoPath string) error {
  return createDelegateHooks(repoPath)
@@ -25,21 +41,7 @@ func CreateDelegateHooks(repoPath string) error {
  // createDelegateHooks creates all the hooks scripts for the repo
  func createDelegateHooks(repoPath string) (err error) {
- var (
- hookNames = []string{"pre-receive", "update", "post-receive"}
- hookTpls = []string{
- fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
- fmt.Sprintf("#!/usr/bin/env %s\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\n\"${hook}\" $1 $2 $3\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
- fmt.Sprintf("#!/usr/bin/env %s\ndata=$(cat)\nexitcodes=\"\"\nhookname=$(basename $0)\nGIT_DIR=${GIT_DIR:-$(dirname $0)}\n\nfor hook in ${GIT_DIR}/hooks/${hookname}.d/*; do\ntest -x \"${hook}\" && test -f \"${hook}\" || continue\necho \"${data}\" | \"${hook}\"\nexitcodes=\"${exitcodes} $?\"\ndone\n\nfor i in ${exitcodes}; do\n[ ${i} -eq 0 ] || exit ${i}\ndone\n", setting.ScriptType),
- }
- giteaHookTpls = []string{
- fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' pre-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
- fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' update $1 $2 $3\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
- fmt.Sprintf("#!/usr/bin/env %s\n\"%s\" hook --config='%s' post-receive\n", setting.ScriptType, setting.AppPath, setting.CustomConf),
- }
- )
+ hookNames, hookTpls, giteaHookTpls := getHookTemplates()
  hookDir := filepath.Join(repoPath, "hooks")
  for i, hookName := range hookNames {
@@ -58,17 +60,96 @@ func createDelegateHooks(repoPath string) (err error) {
  return fmt.Errorf("write old hook file '%s': %v", oldHookPath, err)
  }
+ if err = ensureExecutable(oldHookPath); err != nil {
+ return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
+ }
  if err = os.Remove(newHookPath); err != nil && !os.IsNotExist(err) {
  return fmt.Errorf("unable to pre-remove new hook file '%s' prior to rewriting: %v", newHookPath, err)
  }
  if err = ioutil.WriteFile(newHookPath, []byte(giteaHookTpls[i]), 0777); err != nil {
  return fmt.Errorf("write new hook file '%s': %v", newHookPath, err)
  }
+ if err = ensureExecutable(newHookPath); err != nil {
+ return fmt.Errorf("Unable to set %s executable. Error %v", oldHookPath, err)
+ }
  }
  return nil
  }
+ func checkExecutable(filename string) bool {
+ fileInfo, err := os.Stat(filename)
+ if err != nil {
+ return false
+ }
+ return (fileInfo.Mode() & 0100) > 0
+ }
+ func ensureExecutable(filename string) error {
+ fileInfo, err := os.Stat(filename)
+ if err != nil {
+ return err
+ }
+ if (fileInfo.Mode() & 0100) > 0 {
+ return nil
+ }
+ mode := fileInfo.Mode() | 0100
+ return os.Chmod(filename, mode)
+ }
+ // CheckDelegateHooks checks the hooks scripts for the repo
+ func CheckDelegateHooks(repoPath string) ([]string, error) {
+ hookNames, hookTpls, giteaHookTpls := getHookTemplates()
+ hookDir := filepath.Join(repoPath, "hooks")
+ results := make([]string, 0, 10)
+ for i, hookName := range hookNames {
+ oldHookPath := filepath.Join(hookDir, hookName)
+ newHookPath := filepath.Join(hookDir, hookName+".d", "gitea")
+ cont := false
+ if !com.IsExist(oldHookPath) {
+ results = append(results, fmt.Sprintf("old hook file %s does not exist", oldHookPath))
+ cont = true
+ }
+ if !com.IsExist(oldHookPath + ".d") {
+ results = append(results, fmt.Sprintf("hooks directory %s does not exist", oldHookPath+".d"))
+ cont = true
+ }
+ if !com.IsExist(newHookPath) {
+ results = append(results, fmt.Sprintf("new hook file %s does not exist", newHookPath))
+ cont = true
+ }
+ if cont {
+ continue
+ }
+ contents, err := ioutil.ReadFile(oldHookPath)
+ if err != nil {
+ return results, err
+ }
+ if string(contents) != hookTpls[i] {
+ results = append(results, fmt.Sprintf("old hook file %s is out of date", oldHookPath))
+ }
+ if !checkExecutable(oldHookPath) {
+ results = append(results, fmt.Sprintf("old hook file %s is not executable", oldHookPath))
+ }
+ contents, err = ioutil.ReadFile(newHookPath)
+ if err != nil {
+ return results, err
+ }
+ if string(contents) != giteaHookTpls[i] {
+ results = append(results, fmt.Sprintf("new hook file %s is out of date", newHookPath))
+ }
+ if !checkExecutable(newHookPath) {
+ results = append(results, fmt.Sprintf("new hook file %s is not executable", newHookPath))
+ }
+ }
+ return results, nil
+ }
  // SyncRepositoryHooks rewrites all repositories' pre-receive, update and post-receive hooks
  // to make sure the binary and custom conf path are up-to-date.
  func SyncRepositoryHooks(ctx context.Context) error {