better logging

Lou Knauer 2022-01-31 15:14:37 +01:00
parent f82caee28b
commit 84431585f9
6 changed files with 65 additions and 30 deletions

View File

@ -118,7 +118,9 @@ func setup(t *testing.T) *api.RestApi {
}
resolver := &graph.Resolver{DB: db}
resolver.Init()
if err := resolver.Init(); err != nil {
t.Fatal(err)
}
return &api.RestApi{
DB: db,
Resolver: resolver,

View File

@ -4,12 +4,12 @@ import (
"context"
"errors"
"fmt"
"log"
"regexp"
"strings"
"github.com/ClusterCockpit/cc-backend/auth"
"github.com/ClusterCockpit/cc-backend/graph/model"
"github.com/ClusterCockpit/cc-backend/log"
"github.com/ClusterCockpit/cc-backend/schema"
sq "github.com/Masterminds/squirrel"
"github.com/jmoiron/sqlx"
@ -26,26 +26,28 @@ type Resolver struct {
findJobByIdWithUserStmt *sqlx.Stmt
}
func (r *Resolver) Init() {
func (r *Resolver) Init() error {
findJobById, _, err := sq.Select(schema.JobColumns...).From("job").Where("job.id = ?", nil).ToSql()
if err != nil {
log.Fatal(err)
return err
}
r.findJobByIdStmt, err = r.DB.Preparex(findJobById)
if err != nil {
log.Fatal(err)
return err
}
findJobByIdWithUser, _, err := sq.Select(schema.JobColumns...).From("job").Where("job.id = ?", nil).Where("job.user = ?").ToSql()
if err != nil {
log.Fatal(err)
return err
}
r.findJobByIdWithUserStmt, err = r.DB.Preparex(findJobByIdWithUser)
if err != nil {
log.Fatal(err)
return err
}
return nil
}
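Since Init now returns an error instead of calling log.Fatal itself, each caller decides how a failed setup is handled. A minimal sketch of a call site, mirroring the changes to the test setup above and to main further below (db is assumed to be an already opened *sqlx.DB):

resolver := &graph.Resolver{DB: db}
if err := resolver.Init(); err != nil {
	log.Fatal(err) // or t.Fatal(err) inside a test
}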
// Helper function for the `jobs` GraphQL query. It is also used elsewhere when a list of jobs is needed.
@ -80,6 +82,7 @@ func (r *Resolver) queryJobs(ctx context.Context, filters []*model.JobFilter, pa
return nil, 0, err
}
log.Debugf("SQL query: `%s`, args: %#v", sql, args)
rows, err := r.DB.Queryx(sql, args...)
if err != nil {
return nil, 0, err

View File

@ -4,11 +4,11 @@ import (
"bufio"
"encoding/json"
"fmt"
"log"
"os"
"path/filepath"
"time"
"github.com/ClusterCockpit/cc-backend/log"
"github.com/ClusterCockpit/cc-backend/schema"
"github.com/jmoiron/sqlx"
)
@ -68,7 +68,7 @@ const JOBS_DB_SCHEMA string = `
// repopulate them using the jobs found in `archive`.
func initDB(db *sqlx.DB, archive string) error {
starttime := time.Now()
fmt.Println("Building database...")
log.Print("Building job table...")
// Basic database structure:
_, err := db.Exec(JOBS_DB_SCHEMA)
@ -103,6 +103,7 @@ func initDB(db *sqlx.DB, archive string) error {
return err
}
fmt.Printf("%d jobs inserted...\r", 0)
i := 0
tags := make(map[string]int64)
handleDirectory := func(filename string) error {
@ -158,11 +159,11 @@ func initDB(db *sqlx.DB, archive string) error {
for _, startTimeDir := range startTimeDirs {
if startTimeDir.Type().IsRegular() && startTimeDir.Name() == "meta.json" {
if err := handleDirectory(dirpath); err != nil {
log.Printf("in %s: %s\n", dirpath, err.Error())
log.Errorf("in %s: %s", dirpath, err.Error())
}
} else if startTimeDir.IsDir() {
if err := handleDirectory(filepath.Join(dirpath, startTimeDir.Name())); err != nil {
log.Printf("in %s: %s\n", filepath.Join(dirpath, startTimeDir.Name()), err.Error())
log.Errorf("in %s: %s", filepath.Join(dirpath, startTimeDir.Name()), err.Error())
}
}
}

View File

@ -11,10 +11,20 @@ import (
"os"
)
var DebugWriter io.Writer = os.Stderr
var InfoWriter io.Writer = os.Stderr
var WarnWriter io.Writer = os.Stderr
var ErrorWriter io.Writer = os.Stderr
var (
DebugWriter io.Writer = os.Stderr
InfoWriter io.Writer = os.Stderr
WarnWriter io.Writer = os.Stderr
ErrorWriter io.Writer = os.Stderr
)
var (
DebugPrefix string = "<7>[DEBUG]"
InfoPrefix string = "<6>[INFO]"
WarnPrefix string = "<4>[WARNING]"
ErrPrefix string = "<3>[ERROR]"
FatalPrefix string = "<3>[FATAL]"
)
func init() {
if lvl, ok := os.LookupEnv("LOGLEVEL"); ok {
@ -37,14 +47,14 @@ func init() {
func Debug(v ...interface{}) {
if DebugWriter != io.Discard {
v = append([]interface{}{"<7>[DEBUG]"}, v...)
v = append([]interface{}{DebugPrefix}, v...)
fmt.Fprintln(DebugWriter, v...)
}
}
func Info(v ...interface{}) {
if InfoWriter != io.Discard {
v = append([]interface{}{"<6>[INFO]"}, v...)
v = append([]interface{}{InfoPrefix}, v...)
fmt.Fprintln(InfoWriter, v...)
}
}
@ -55,21 +65,21 @@ func Print(v ...interface{}) {
func Warn(v ...interface{}) {
if WarnWriter != io.Discard {
v = append([]interface{}{"<4>[WARNING]"}, v...)
v = append([]interface{}{WarnPrefix}, v...)
fmt.Fprintln(WarnWriter, v...)
}
}
func Error(v ...interface{}) {
if ErrorWriter != io.Discard {
v = append([]interface{}{"<3>[ERROR]"}, v...)
v = append([]interface{}{ErrPrefix}, v...)
fmt.Fprintln(ErrorWriter, v...)
}
}
func Fatal(v ...interface{}) {
if ErrorWriter != io.Discard {
v = append([]interface{}{"<0>[FATAL]"}, v...)
v = append([]interface{}{FatalPrefix}, v...)
fmt.Fprintln(ErrorWriter, v...)
}
os.Exit(1)
@ -77,13 +87,13 @@ func Fatal(v ...interface{}) {
func Debugf(format string, v ...interface{}) {
if DebugWriter != io.Discard {
fmt.Fprintf(DebugWriter, "<7>[DEBUG] "+format+"\n", v...)
fmt.Fprintf(DebugWriter, DebugPrefix+" "+format+"\n", v...)
}
}
func Infof(format string, v ...interface{}) {
if InfoWriter != io.Discard {
fmt.Fprintf(InfoWriter, "<6>[INFO] "+format+"\n", v...)
fmt.Fprintf(InfoWriter, InfoPrefix+" "+format+"\n", v...)
}
}
@ -91,21 +101,27 @@ func Printf(format string, v ...interface{}) {
Infof(format, v...)
}
func Finfof(w io.Writer, format string, v ...interface{}) {
if w != io.Discard {
fmt.Fprintf(w, InfoPrefix+" "+format+"\n", v...)
}
}
func Warnf(format string, v ...interface{}) {
if WarnWriter != io.Discard {
fmt.Fprintf(WarnWriter, "<4>[WARNING] "+format+"\n", v...)
fmt.Fprintf(WarnWriter, WarnPrefix+" "+format+"\n", v...)
}
}
func Errorf(format string, v ...interface{}) {
if ErrorWriter != io.Discard {
fmt.Fprintf(ErrorWriter, "<3>[ERROR] "+format+"\n", v...)
fmt.Fprintf(ErrorWriter, ErrPrefix+" "+format+"\n", v...)
}
}
func Fatalf(format string, v ...interface{}) {
if ErrorWriter != io.Discard {
fmt.Fprintf(ErrorWriter, "<0>[FATAL] "+format+"\n", v...)
fmt.Fprintf(ErrorWriter, FatalPrefix+" "+format+"\n", v...)
}
os.Exit(1)
}
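A minimal usage sketch of the reworked log package, assuming the import path github.com/ClusterCockpit/cc-backend/log used in the other files of this commit; the job count and path are made up for illustration:

package main

import "github.com/ClusterCockpit/cc-backend/log"

func main() {
	// The "<7>"/"<6>"/"<4>"/"<3>" prefixes follow the sd-daemon level syntax,
	// so systemd-journald can map each line to a log priority.
	log.Infof("%d jobs inserted", 1337)                           // -> <6>[INFO] 1337 jobs inserted
	log.Errorf("in %s: %s", "./job-archive", "meta.json missing") // -> <3>[ERROR] in ./job-archive: meta.json missing
	// Writers default to os.Stderr; init() reads the LOGLEVEL environment
	// variable, and the io.Discard checks above suggest disabled levels are
	// silenced by pointing their writer at io.Discard.
}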

View File

@ -8,6 +8,7 @@ import (
"errors"
"flag"
"fmt"
"io"
"net"
"net/http"
"net/url"
@ -151,6 +152,11 @@ func main() {
}
}
if strings.HasPrefix(programConfig.DB, "env:") {
envvar := strings.TrimPrefix(programConfig.DB, "env:")
programConfig.DB = os.Getenv(envvar)
}
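This indirection keeps the connection string out of the on-disk configuration: a hypothetical value such as "env:DBSTRING" makes the server resolve the real connection string from the DBSTRING environment variable at startup.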
var err error
if programConfig.DBDriver == "sqlite3" {
db, err = sqlx.Open("sqlite3", fmt.Sprintf("%s?_foreign_keys=on", programConfig.DB))
@ -236,7 +242,9 @@ func main() {
// Build routes...
resolver := &graph.Resolver{DB: db}
resolver.Init()
if err := resolver.Init(); err != nil {
log.Fatal(err)
}
graphQLEndpoint := handler.NewDefaultServer(generated.NewExecutableSchema(generated.Config{Resolvers: resolver}))
if os.Getenv("DEBUG") != "1" {
graphQLEndpoint.SetRecoverFunc(func(ctx context.Context, err interface{}) error {
@ -313,10 +321,14 @@ func main() {
api.MountRoutes(secured)
r.PathPrefix("/").Handler(http.FileServer(http.Dir(programConfig.StaticFiles)))
handler := handlers.CORS(
r.Use(handlers.CompressHandler)
r.Use(handlers.CORS(
handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization"}),
handlers.AllowedMethods([]string{"GET", "POST", "HEAD", "OPTIONS"}),
handlers.AllowedOrigins([]string{"*"}))(handlers.LoggingHandler(log.InfoWriter, handlers.CompressHandler(r)))
handlers.AllowedOrigins([]string{"*"})))
handler := handlers.CustomLoggingHandler(log.InfoWriter, r, func(w io.Writer, params handlers.LogFormatterParams) {
log.Finfof(w, "%s %s (Response: %d, Size: %d)", params.Request.Method, params.URL.RequestURI(), params.StatusCode, params.Size)
})
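With gorilla/handlers' CustomLoggingHandler, the formatter above emits one line per request via log.Finfof, so access logs carry the same level prefix as the rest of the output. A hypothetical request would be logged roughly as:

<6>[INFO] GET /monitoring/jobs/ (Response: 200, Size: 14312)

(method, path, status code, and size here are invented for illustration).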
var wg sync.WaitGroup
server := http.Server{

View File

@ -2,9 +2,10 @@ package templates
import (
"html/template"
"log"
"net/http"
"os"
"github.com/ClusterCockpit/cc-backend/log"
)
var templatesDir string
@ -54,6 +55,6 @@ func Render(rw http.ResponseWriter, r *http.Request, file string, page *Page) {
}
if err := t.Execute(rw, page); err != nil {
log.Printf("template error: %s\n", err.Error())
log.Errorf("template error: %s", err.Error())
}
}