Refactor variable naming and doc comments

2025-09-27 09:27:36 +02:00
parent 50d000e7e2
commit 4fc78bc382
23 changed files with 112 additions and 267 deletions

View File

@@ -185,7 +185,7 @@ func serverInit() {
}) })
securedapi.Use(func(next http.Handler) http.Handler { securedapi.Use(func(next http.Handler) http.Handler {
return authHandle.AuthApi( return authHandle.AuthAPI(
// On success; // On success;
next, next,
// On failure: JSON Response // On failure: JSON Response
@@ -193,7 +193,7 @@ func serverInit() {
}) })
userapi.Use(func(next http.Handler) http.Handler { userapi.Use(func(next http.Handler) http.Handler {
return authHandle.AuthUserApi( return authHandle.AuthUserAPI(
// On success; // On success;
next, next,
// On failure: JSON Response // On failure: JSON Response
@@ -201,7 +201,7 @@ func serverInit() {
}) })
metricstoreapi.Use(func(next http.Handler) http.Handler { metricstoreapi.Use(func(next http.Handler) http.Handler {
return authHandle.AuthMetricStoreApi( return authHandle.AuthMetricStoreAPI(
// On success; // On success;
next, next,
// On failure: JSON Response // On failure: JSON Response
@@ -209,7 +209,7 @@ func serverInit() {
}) })
configapi.Use(func(next http.Handler) http.Handler { configapi.Use(func(next http.Handler) http.Handler {
return authHandle.AuthConfigApi( return authHandle.AuthConfigAPI(
// On success; // On success;
next, next,
// On failure: JSON Response // On failure: JSON Response
@@ -217,7 +217,7 @@ func serverInit() {
}) })
frontendapi.Use(func(next http.Handler) http.Handler { frontendapi.Use(func(next http.Handler) http.Handler {
return authHandle.AuthFrontendApi( return authHandle.AuthFrontendAPI(
// On success; // On success;
next, next,
// On failure: JSON Response // On failure: JSON Response
@@ -255,7 +255,7 @@ func serverInit() {
router.PathPrefix("/img/").Handler(http.StripPrefix("/img/", http.FileServer(http.Dir("./var/img")))) router.PathPrefix("/img/").Handler(http.StripPrefix("/img/", http.FileServer(http.Dir("./var/img"))))
} }
} }
router.PathPrefix("/").Handler(web.ServeFiles()) router.PathPrefix("/static/").Handler(http.StripPrefix("/static/", web.ServeFiles()))
} else { } else {
router.PathPrefix("/").Handler(http.FileServer(http.Dir(config.Keys.StaticFiles))) router.PathPrefix("/").Handler(http.FileServer(http.Dir(config.Keys.StaticFiles)))
} }
@@ -267,6 +267,35 @@ func serverInit() {
handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization", "Origin"}), handlers.AllowedHeaders([]string{"X-Requested-With", "Content-Type", "Authorization", "Origin"}),
handlers.AllowedMethods([]string{"GET", "POST", "HEAD", "OPTIONS"}), handlers.AllowedMethods([]string{"GET", "POST", "HEAD", "OPTIONS"}),
handlers.AllowedOrigins([]string{"*"}))) handlers.AllowedOrigins([]string{"*"})))
secured.NotFoundHandler = http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
page := web.Page{
Title: "ClusterCockpit - Not Found",
Build: buildInfo,
}
rw.Header().Add("Content-Type", "text/html; charset=utf-8")
web.RenderTemplate(rw, "404.tmpl", &page)
})
// secured.NotFoundHandler = http.HandlerFunc(http.NotFound)
// router.NotFoundHandler = router.NewRoute().HandlerFunc(http.NotFound).GetHandler()
// printEndpoints(router)
}
func printEndpoints(r *mux.Router) {
r.Walk(func(route *mux.Route, router *mux.Router, ancestors []*mux.Route) error {
path, err := route.GetPathTemplate()
if err != nil {
path = "nopath"
}
methods, err := route.GetMethods()
if err != nil {
methods = append(methods, "nomethod")
}
fmt.Printf("%v %s\n", methods, path)
return nil
})
} }
func serverStart() { func serverStart() {

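The hunks above rename the auth middleware constructors (AuthApi → AuthAPI and so on) to follow Go's initialism convention; the wiring pattern itself is unchanged: each subrouter installs a middleware that either forwards to the next handler or calls a failure callback that writes JSON. A self-contained sketch of that pattern, with a placeholder authAPI constructor and a stand-in header check that are illustrative assumptions, not cc-backend's actual implementation:

package main

import (
	"encoding/json"
	"errors"
	"net/http"

	"github.com/gorilla/mux"
)

// authAPI mimics the shape of the AuthAPI-style constructors: it returns a
// handler that calls onsuccess or onfailure. The header check is only a
// stand-in, not the real token validation.
func authAPI(
	onsuccess http.Handler,
	onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler {
	return http.HandlerFunc(func(rw http.ResponseWriter, r *http.Request) {
		if r.Header.Get("Authorization") == "" {
			onfailure(rw, r, errors.New("missing Authorization header"))
			return
		}
		onsuccess.ServeHTTP(rw, r)
	})
}

func main() {
	router := mux.NewRouter()
	securedapi := router.PathPrefix("/api").Subrouter()
	securedapi.Use(func(next http.Handler) http.Handler {
		// On failure: JSON response, as in the hunks above.
		return authAPI(next, func(rw http.ResponseWriter, r *http.Request, authErr error) {
			rw.Header().Set("Content-Type", "application/json")
			rw.WriteHeader(http.StatusUnauthorized)
			json.NewEncoder(rw).Encode(map[string]string{"error": authErr.Error()})
		})
	})
	securedapi.HandleFunc("/ping", func(rw http.ResponseWriter, r *http.Request) {
		rw.Write([]byte("pong"))
	})
	http.ListenAndServe("localhost:8080", router)
}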
View File

@@ -2,6 +2,8 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// Package auth implements various authentication methods
package auth package auth
import ( import (
@@ -338,7 +340,7 @@ func (auth *Authentication) Auth(
}) })
} }
func (auth *Authentication) AuthApi( func (auth *Authentication) AuthAPI(
onsuccess http.Handler, onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error), onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler { ) http.Handler {
@@ -381,7 +383,7 @@ func (auth *Authentication) AuthApi(
}) })
} }
func (auth *Authentication) AuthUserApi( func (auth *Authentication) AuthUserAPI(
onsuccess http.Handler, onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error), onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler { ) http.Handler {
@@ -417,7 +419,7 @@ func (auth *Authentication) AuthUserApi(
}) })
} }
func (auth *Authentication) AuthMetricStoreApi( func (auth *Authentication) AuthMetricStoreAPI(
onsuccess http.Handler, onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error), onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler { ) http.Handler {
@@ -453,7 +455,7 @@ func (auth *Authentication) AuthMetricStoreApi(
}) })
} }
func (auth *Authentication) AuthConfigApi( func (auth *Authentication) AuthConfigAPI(
onsuccess http.Handler, onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error), onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler { ) http.Handler {
@@ -474,7 +476,7 @@ func (auth *Authentication) AuthConfigApi(
}) })
} }
func (auth *Authentication) AuthFrontendApi( func (auth *Authentication) AuthFrontendAPI(
onsuccess http.Handler, onsuccess http.Handler,
onfailure func(rw http.ResponseWriter, r *http.Request, authErr error), onfailure func(rw http.ResponseWriter, r *http.Request, authErr error),
) http.Handler { ) http.Handler {

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (
@@ -135,7 +136,7 @@ func (ja *JWTAuthenticator) AuthViaJWT(
}, nil }, nil
} }
// Generate a new JWT that can be used for authentication // ProvideJWT generates a new JWT that can be used for authentication
func (ja *JWTAuthenticator) ProvideJWT(user *schema.User) (string, error) { func (ja *JWTAuthenticator) ProvideJWT(user *schema.User) (string, error) {
if ja.privateKey == nil { if ja.privateKey == nil {
return "", errors.New("environment variable 'JWT_PRIVATE_KEY' not set") return "", errors.New("environment variable 'JWT_PRIVATE_KEY' not set")

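The new ProvideJWT doc comment above describes token generation backed by an Ed25519 key taken from JWT_PRIVATE_KEY. As a rough stand-alone illustration of issuing an EdDSA-signed token — using github.com/golang-jwt/jwt/v5, freshly generated keys, and made-up claim names as assumptions, not cc-backend's implementation:

package main

import (
	"crypto/ed25519"
	"fmt"
	"time"

	"github.com/golang-jwt/jwt/v5"
)

func main() {
	// Generate a throwaway key pair; the real code would load it from JWT_PRIVATE_KEY.
	_, priv, _ := ed25519.GenerateKey(nil)

	claims := jwt.MapClaims{
		"sub": "demo-user",                      // assumed claim layout
		"exp": time.Now().Add(time.Hour).Unix(), // token lifetime is an assumption
	}
	tok := jwt.NewWithClaims(jwt.SigningMethodEdDSA, claims)
	signed, err := tok.SignedString(priv)
	if err != nil {
		panic(err)
	}
	fmt.Println(signed)
}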
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (
@@ -18,7 +19,7 @@ import (
) )
type LdapConfig struct { type LdapConfig struct {
Url string `json:"url"` URL string `json:"url"`
UserBase string `json:"user_base"` UserBase string `json:"user_base"`
SearchDN string `json:"search_dn"` SearchDN string `json:"search_dn"`
UserBind string `json:"user_bind"` UserBind string `json:"user_bind"`
@@ -130,7 +131,7 @@ func (la *LdapAuthenticator) Login(
} }
defer l.Close() defer l.Close()
userDn := strings.Replace(Keys.LdapConfig.UserBind, "{username}", user.Username, -1) userDn := strings.ReplaceAll(Keys.LdapConfig.UserBind, "{username}", user.Username)
if err := l.Bind(userDn, r.FormValue("password")); err != nil { if err := l.Bind(userDn, r.FormValue("password")); err != nil {
cclog.Errorf("AUTH/LDAP > Authentication for user %s failed: %v", cclog.Errorf("AUTH/LDAP > Authentication for user %s failed: %v",
user.Username, err) user.Username, err)
@@ -141,9 +142,9 @@ func (la *LdapAuthenticator) Login(
} }
func (la *LdapAuthenticator) Sync() error { func (la *LdapAuthenticator) Sync() error {
const IN_DB int = 1 const InDB int = 1
const IN_LDAP int = 2 const InLdap int = 2
const IN_BOTH int = 3 const InBoth int = 3
ur := repository.GetUserRepository() ur := repository.GetUserRepository()
lc := Keys.LdapConfig lc := Keys.LdapConfig
@@ -154,7 +155,7 @@ func (la *LdapAuthenticator) Sync() error {
} }
for _, username := range usernames { for _, username := range usernames {
users[username] = IN_DB users[username] = InDB
} }
l, err := la.getLdapConnection(true) l, err := la.getLdapConnection(true)
@@ -183,18 +184,18 @@ func (la *LdapAuthenticator) Sync() error {
_, ok := users[username] _, ok := users[username]
if !ok { if !ok {
users[username] = IN_LDAP users[username] = InLdap
newnames[username] = entry.GetAttributeValue(la.UserAttr) newnames[username] = entry.GetAttributeValue(la.UserAttr)
} else { } else {
users[username] = IN_BOTH users[username] = InBoth
} }
} }
for username, where := range users { for username, where := range users {
if where == IN_DB && lc.SyncDelOldUsers { if where == InDB && lc.SyncDelOldUsers {
ur.DelUser(username) ur.DelUser(username)
cclog.Debugf("sync: remove %v (does not show up in LDAP anymore)", username) cclog.Debugf("sync: remove %v (does not show up in LDAP anymore)", username)
} else if where == IN_LDAP { } else if where == InLdap {
name := newnames[username] name := newnames[username]
var roles []string var roles []string
@@ -222,7 +223,7 @@ func (la *LdapAuthenticator) Sync() error {
func (la *LdapAuthenticator) getLdapConnection(admin bool) (*ldap.Conn, error) { func (la *LdapAuthenticator) getLdapConnection(admin bool) (*ldap.Conn, error) {
lc := Keys.LdapConfig lc := Keys.LdapConfig
conn, err := ldap.DialURL(lc.Url) conn, err := ldap.DialURL(lc.URL)
if err != nil { if err != nil {
cclog.Warn("LDAP URL dial failed") cclog.Warn("LDAP URL dial failed")
return nil, err return nil, err

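Besides the Url → URL and IN_* → In* renames, the hunk above swaps strings.Replace(..., -1) for strings.ReplaceAll, the idiomatic equivalent. A quick stand-alone check; the user_bind template value is only an example:

package main

import (
	"fmt"
	"strings"
)

func main() {
	// Example user_bind template with a {username} placeholder (illustrative value).
	bind := "uid={username},ou=people,dc=example,dc=org"
	a := strings.Replace(bind, "{username}", "alice", -1)
	b := strings.ReplaceAll(bind, "{username}", "alice")
	fmt.Println(a == b) // true: n = -1 means "replace all occurrences"
	fmt.Println(b)
}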
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package auth package auth
var configSchema = ` var configSchema = `

View File

@@ -2,6 +2,8 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// Package config implements the program configuration data structures, validation and parsing
package config package config
import ( import (
@@ -12,20 +14,12 @@ import (
cclog "github.com/ClusterCockpit/cc-lib/ccLogger" cclog "github.com/ClusterCockpit/cc-lib/ccLogger"
) )
type ResampleConfig struct {
// Array of resampling target resolutions, in seconds; Example: [600,300,60]
Resolutions []int `json:"resolutions"`
// Trigger next zoom level at less than this many visible datapoints
Trigger int `json:"trigger"`
}
// Format of the configuration (file). See below for the defaults.
type ProgramConfig struct { type ProgramConfig struct {
// Address where the http (or https) server will listen on (for example: 'localhost:80'). // Address where the http (or https) server will listen on (for example: 'localhost:80').
Addr string `json:"addr"` Addr string `json:"addr"`
// Addresses from which secured admin API endpoints can be reached, can be wildcard "*" // Addresses from which secured admin API endpoints can be reached, can be wildcard "*"
ApiAllowedIPs []string `json:"apiAllowedIPs"` APIAllowedIPs []string `json:"apiAllowedIPs"`
// Drop root permissions once .env was read and the port was taken. // Drop root permissions once .env was read and the port was taken.
User string `json:"user"` User string `json:"user"`
@@ -59,16 +53,12 @@ type ProgramConfig struct {
SessionMaxAge string `json:"session-max-age"` SessionMaxAge string `json:"session-max-age"`
// If both those options are not empty, use HTTPS using those certificates. // If both those options are not empty, use HTTPS using those certificates.
HttpsCertFile string `json:"https-cert-file"` HTTPSCertFile string `json:"https-cert-file"`
HttpsKeyFile string `json:"https-key-file"` HTTPSKeyFile string `json:"https-key-file"`
// If not the empty string and `addr` does not end in ":80", // If not the empty string and `addr` does not end in ":80",
// redirect every request incoming at port 80 to that url. // redirect every request incoming at port 80 to that url.
RedirectHttpTo string `json:"redirect-http-to"` RedirectHTTPTo string `json:"redirect-http-to"`
// If overwritten, at least all the options in the defaults below must
// be provided! Most options here can be overwritten by the user.
UiDefaults map[string]any `json:"ui-defaults"`
// Where to store MachineState files // Where to store MachineState files
MachineStateDir string `json:"machine-state-dir"` MachineStateDir string `json:"machine-state-dir"`
@@ -87,6 +77,13 @@ type ProgramConfig struct {
EnableResampling *ResampleConfig `json:"resampling"` EnableResampling *ResampleConfig `json:"resampling"`
} }
type ResampleConfig struct {
// Array of resampling target resolutions, in seconds; Example: [600,300,60]
Resolutions []int `json:"resolutions"`
// Trigger next zoom level at less than this many visible datapoints
Trigger int `json:"trigger"`
}
type IntRange struct { type IntRange struct {
From int `json:"from"` From int `json:"from"`
To int `json:"to"` To int `json:"to"`
@@ -123,28 +120,6 @@ var Keys ProgramConfig = ProgramConfig{
SessionMaxAge: "168h", SessionMaxAge: "168h",
StopJobsExceedingWalltime: 0, StopJobsExceedingWalltime: 0,
ShortRunningJobsDuration: 5 * 60, ShortRunningJobsDuration: 5 * 60,
UiDefaults: map[string]any{
"analysis_view_histogramMetrics": []string{"flops_any", "mem_bw", "mem_used"},
"analysis_view_scatterPlotMetrics": [][]string{{"flops_any", "mem_bw"}, {"flops_any", "cpu_load"}, {"cpu_load", "mem_bw"}},
"job_view_nodestats_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
"job_view_selectedMetrics": []string{"flops_any", "mem_bw", "mem_used"},
"job_view_showFootprint": true,
"job_list_usePaging": false,
"plot_general_colorBackground": true,
"plot_general_colorscheme": []string{"#00bfff", "#0000ff", "#ff00ff", "#ff0000", "#ff8000", "#ffff00", "#80ff00"},
"plot_general_lineWidth": 3,
"plot_list_jobsPerPage": 50,
"plot_list_selectedMetrics": []string{"cpu_load", "mem_used", "flops_any", "mem_bw"},
"plot_view_plotsPerRow": 3,
"plot_view_showPolarplot": true,
"plot_view_showRoofline": true,
"plot_view_showStatTable": true,
"system_view_selectedMetric": "cpu_load",
"analysis_view_selectedTopEntity": "user",
"analysis_view_selectedTopCategory": "totalWalltime",
"status_view_selectedTopUserCategory": "totalJobs",
"status_view_selectedTopProjectCategory": "totalJobs",
},
} }
func Init(mainConfig json.RawMessage, clusterConfig json.RawMessage) { func Init(mainConfig json.RawMessage, clusterConfig json.RawMessage) {

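The hunks above drop the hard-coded UiDefaults map and move ResampleConfig below ProgramConfig; the struct itself is unchanged and is still referenced by the `resampling` key. A minimal sketch of how that config block maps onto the struct — the trigger value 30 is only an illustration:

package main

import (
	"encoding/json"
	"fmt"
)

// Copied from the struct above; comments shortened.
type ResampleConfig struct {
	Resolutions []int `json:"resolutions"` // target resolutions in seconds
	Trigger     int   `json:"trigger"`     // zoom when fewer visible datapoints than this
}

func main() {
	raw := []byte(`{"resolutions": [600, 300, 60], "trigger": 30}`)
	var rc ResampleConfig
	if err := json.Unmarshal(raw, &rc); err != nil {
		panic(err)
	}
	fmt.Printf("%+v\n", rc) // {Resolutions:[600 300 60] Trigger:30}
}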
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package config package config
import ( import (

View File

@@ -10,9 +10,6 @@ import (
var InternalCCMSFlag bool = false var InternalCCMSFlag bool = false
// --------------------
// Metric Store config
// --------------------
type MetricStoreConfig struct { type MetricStoreConfig struct {
Checkpoints struct { Checkpoints struct {
FileFormat string `json:"file-format"` FileFormat string `json:"file-format"`
@@ -55,7 +52,7 @@ type NatsConfig struct {
var MetricStoreKeys MetricStoreConfig var MetricStoreKeys MetricStoreConfig
// For aggregation over multiple values at different cpus/sockets/..., not time! // AggregationStrategy for aggregation over multiple values at different cpus/sockets/..., not time!
type AggregationStrategy int type AggregationStrategy int
const ( const (
@@ -93,7 +90,7 @@ var Metrics map[string]MetricConfig
func InitMetricStore(msConfig json.RawMessage) { func InitMetricStore(msConfig json.RawMessage) {
// Validate(msConfigSchema, msConfig) // Validate(msConfigSchema, msConfig)
dec := json.NewDecoder(bytes.NewReader(msConfig)) dec := json.NewDecoder(bytes.NewReader(msConfig))
dec.DisallowUnknownFields() // dec.DisallowUnknownFields()
if err := dec.Decode(&MetricStoreKeys); err != nil { if err := dec.Decode(&MetricStoreKeys); err != nil {
cclog.Abortf("[METRICSTORE]> Metric Store Config Init: Could not decode config file '%s'.\nError: %s\n", msConfig, err.Error()) cclog.Abortf("[METRICSTORE]> Metric Store Config Init: Could not decode config file '%s'.\nError: %s\n", msConfig, err.Error())
} }
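Commenting out dec.DisallowUnknownFields() above changes how strictly the metric-store config is parsed: with strict decoding any unrecognized key aborts Init, without it unknown keys are silently ignored. A reduced demonstration; the field and key names are assumptions, not the real MetricStoreConfig:

package main

import (
	"bytes"
	"encoding/json"
	"fmt"
)

// Reduced stand-in for MetricStoreKeys; field and JSON key names are assumptions.
type msKeys struct {
	RetentionInMemory string `json:"retention-in-memory"`
}

func main() {
	raw := []byte(`{"retention-in-memory": "48h", "unknown-key": true}`)

	strict := json.NewDecoder(bytes.NewReader(raw))
	strict.DisallowUnknownFields()
	var a msKeys
	fmt.Println("strict: ", strict.Decode(&a)) // error: json: unknown field "unknown-key"

	lenient := json.NewDecoder(bytes.NewReader(raw))
	var b msKeys
	fmt.Println("lenient:", lenient.Decode(&b), b) // <nil> {48h}: unknown key ignored
}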
@@ -106,11 +103,10 @@ func GetMetricFrequency(metricName string) (int64, error) {
return 0, fmt.Errorf("[METRICSTORE]> metric %s not found", metricName) return 0, fmt.Errorf("[METRICSTORE]> metric %s not found", metricName)
} }
// add logic to add metrics. Redundant metrics should be updated with max frequency. // AddMetric adds a metric. Redundant metrics should be updated with max frequency.
// use metric.Name to check if the metric already exists. // use metric.Name to check if the metric already exists.
// if not, add it to the Metrics map. // if not, add it to the Metrics map.
func AddMetric(name string, metric MetricConfig) error { func AddMetric(name string, metric MetricConfig) error {
if Metrics == nil { if Metrics == nil {
Metrics = make(map[string]MetricConfig, 0) Metrics = make(map[string]MetricConfig, 0)
} }

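The AddMetric doc comment above describes a merge rule: a metric registered more than once should keep the higher frequency. A minimal sketch of that rule, with MetricConfig reduced to a single Frequency field (an assumption; the real struct carries more):

package main

import "fmt"

type MetricConfig struct {
	Frequency int64
}

var Metrics map[string]MetricConfig

func AddMetric(name string, metric MetricConfig) error {
	if Metrics == nil {
		Metrics = make(map[string]MetricConfig, 0)
	}
	if existing, ok := Metrics[name]; ok {
		// Redundant metric: keep the maximum frequency.
		if metric.Frequency > existing.Frequency {
			Metrics[name] = metric
		}
		return nil
	}
	Metrics[name] = metric
	return nil
}

func main() {
	AddMetric("cpu_load", MetricConfig{Frequency: 60})
	AddMetric("cpu_load", MetricConfig{Frequency: 30}) // redundant, stays at 60
	fmt.Println(Metrics["cpu_load"].Frequency)
}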
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package config package config
var configSchema = ` var configSchema = `
@@ -119,177 +120,6 @@ var configSchema = `
"required": ["apiAllowedIPs"] "required": ["apiAllowedIPs"]
}` }`
var uiConfigSchema = `
{
"type": "object",
"properties": {
"jobList": {
"description": "Job list defaults. Applies to user- and jobs views.",
"type": "object",
"properties": {
"usePaging": {
"description": "If classic paging is used instead of continuous scrolling by default.",
"type": "boolean"
},
"showFootprint": {
"description": "If footprint bars are shown as first column by default.",
"type": "boolean"
}
},
"required": ["usePaging", "showFootprint"]
},
"nodeList": {
"description": "Node list defaults. Applies to node list view.",
"type": "object",
"properties": {
"usePaging": {
"description": "If classic paging is used instead of continuous scrolling by default.",
"type": "boolean"
}
},
"required": ["usePaging"]
},
"jobView": {
"description": "Job view defaults.",
"type": "object",
"properties": {
"showPolarPlot": {
"description": "If the job metric footprints polar plot is shown by default.",
"type": "boolean"
},
"showFootprint": {
"description": "If the annotated job metric footprint bars are shown by default.",
"type": "boolean"
},
"showRoofline": {
"description": "If the job roofline plot is shown by default.",
"type": "boolean"
},
"showStatTable": {
"description": "If the job metric statistics table is shown by default.",
"type": "boolean"
}
},
"required": ["showFootprint"]
},
"metricConfig": {
"description": "Global initial metric selections for primary views of all clusters.",
"type": "object",
"properties": {
"jobListMetrics": {
"description": "Initial metrics shown for new users in job lists (User and jobs view).",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
},
"jobViewPlotMetrics": {
"description": "Initial metrics shown for new users as job view metric plots.",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
},
"jobViewTableMetrics": {
"description": "Initial metrics shown for new users in job view statistics table.",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
},
"clusters": {
"description": "Overrides for global defaults by cluster and subcluster.",
"type": "array",
"items": {
"type": "object",
"properties": {
"name": {
"description": "The name of the cluster.",
"kind": {
"type": "string",
"enum": ["influxdb", "prometheus", "cc-metric-store", "cc-metric-store-internal", "test"]
},
"url": {
"type": "string"
},
"subClusters" {
"description": "The array of overrides per subcluster.",
"type":"array",
"items": {
"type": "object",
"properties": {
"name": {
"description": "The name of the subcluster.",
"type": "string"
},
"jobListMetrics": {
"description": "Initial metrics shown for new users in job lists (User and jobs view) for subcluster.",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
},
"jobViewPlotMetrics": {
"description": "Initial metrics shown for new users as job view timeplots for subcluster.",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
},
"jobViewTableMetrics": {
"description": "Initial metrics shown for new users in job view statistics table for subcluster.",
"type": "array",
"items": {
"type": "string",
"minItems": 1
}
}
},
"required": ["name"],
"minItems": 1
}
}
},
"required": ["name", "subClusters"],
"minItems": 1
}
},
"required": ["jobListMetrics", "jobViewPlotMetrics", "jobViewTableMetrics"]
}
},
"plotConfiguration": {
"description": "Initial settings for plot render options.",
"type": "object",
"properties": {
"colorBackground": {
"description": "If the metric plot backgrounds are initially colored by threshold limits.",
"type": "boolean"
},
"plotsPerRow": {
"description": "How many plots are initially rendered in per row. Applies to job, single node, and analysis views.",
"type": "integer"
},
"lineWidth": {
"description": "Initial thickness of rendered plotlines. Applies to metric plot, job compare plot and roofline.",
"type": "integer"
},
"colorScheme": {
"description": "Initial colorScheme to be used for metric plots.",
"type": "array",
"items": {
"type": "string"
}
}
},
"required": ["colorBackground", "plotsPerRow", "lineWidth"]
}
}`
var clustersSchema = ` var clustersSchema = `
{ {
"type": "array", "type": "array",
@@ -306,7 +136,7 @@ var clustersSchema = `
"properties": { "properties": {
"kind": { "kind": {
"type": "string", "type": "string",
"enum": ["influxdb", "prometheus", "cc-metric-store", "test"] "enum": ["influxdb", "prometheus", "cc-metric-store", "cc-metric-store-internal", "test"]
}, },
"url": { "url": {
"type": "string" "type": "string"
@@ -315,7 +145,7 @@ var clustersSchema = `
"type": "string" "type": "string"
} }
}, },
"required": ["kind", "url"] "required": ["kind"]
}, },
"filterRanges": { "filterRanges": {
"description": "This option controls the slider ranges for the UI controls of numNodes, duration, and startTime.", "description": "This option controls the slider ranges for the UI controls of numNodes, duration, and startTime.",

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package config package config
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package metricdata package metricdata
import ( import (
@@ -33,12 +34,10 @@ type CCMetricStoreConfigInternal struct {
} }
// Bloat Code // Bloat Code
type CCMetricStoreInternal struct { type CCMetricStoreInternal struct{}
}
// Bloat Code // Bloat Code
func (ccms *CCMetricStoreInternal) Init(rawConfig json.RawMessage) error { func (ccms *CCMetricStoreInternal) Init(rawConfig json.RawMessage) error {
return nil return nil
} }

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package metricdata package metricdata
import ( import (
@@ -17,7 +18,7 @@ var TestLoadDataCallback func(job *schema.Job, metrics []string, scopes []schema
panic("TODO") panic("TODO")
} }
// Only a mock for unit-testing. // TestMetricDataRepository is only a mock for unit-testing.
type TestMetricDataRepository struct{} type TestMetricDataRepository struct{}
func (tmdr *TestMetricDataRepository) Init(_ json.RawMessage) error { func (tmdr *TestMetricDataRepository) Init(_ json.RawMessage) error {
@@ -73,11 +74,10 @@ func (tmdr *TestMetricDataRepository) LoadNodeListData(
panic("TODO") panic("TODO")
} }
func DeepCopy(jd_temp schema.JobData) schema.JobData { func DeepCopy(jdTemp schema.JobData) schema.JobData {
jd := make(schema.JobData, len(jd_temp)) jd := make(schema.JobData, len(jdTemp))
for k, v := range jd_temp { for k, v := range jdTemp {
jd[k] = make(map[schema.MetricScope]*schema.JobMetric, len(jd_temp[k])) jd[k] = make(map[schema.MetricScope]*schema.JobMetric, len(jdTemp[k]))
for k_, v_ := range v { for k_, v_ := range v {
jd[k][k_] = new(schema.JobMetric) jd[k][k_] = new(schema.JobMetric)
jd[k][k_].Series = make([]schema.Series, len(v_.Series)) jd[k][k_].Series = make([]schema.Series, len(v_.Series))

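DeepCopy above allocates fresh inner maps instead of copying only the outer map; otherwise the copy would still share the nested JobMetric maps with the original. A reduced illustration with plain maps:

package main

import "fmt"

func main() {
	src := map[string]map[string]int{"flops_any": {"node": 1}}

	shallow := make(map[string]map[string]int, len(src))
	for k, v := range src {
		shallow[k] = v // inner map still shared with src
	}

	deep := make(map[string]map[string]int, len(src))
	for k, v := range src {
		inner := make(map[string]int, len(v))
		for k2, v2 := range v {
			inner[k2] = v2
		}
		deep[k] = inner
	}

	src["flops_any"]["node"] = 99
	fmt.Println(shallow["flops_any"]["node"], deep["flops_any"]["node"]) // 99 1
}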
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
var configSchema = ` var configSchema = `

View File

@@ -2,6 +2,8 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
// Package archive implements the job archive interface and various backend implementations
package archive package archive
import ( import (
@@ -108,7 +110,6 @@ func GetHandle() ArchiveBackend {
return ar return ar
} }
// Helper to metricdataloader.LoadAverages().
func LoadAveragesFromArchive( func LoadAveragesFromArchive(
job *schema.Job, job *schema.Job,
metrics []string, metrics []string,
@@ -131,7 +132,6 @@ func LoadAveragesFromArchive(
return nil return nil
} }
// Helper to metricdataloader.LoadJobStats().
func LoadStatsFromArchive( func LoadStatsFromArchive(
job *schema.Job, job *schema.Job,
metrics []string, metrics []string,
@@ -160,7 +160,6 @@ func LoadStatsFromArchive(
return data, nil return data, nil
} }
// Helper to metricdataloader.LoadScopedJobStats().
func LoadScopedStatsFromArchive( func LoadScopedStatsFromArchive(
job *schema.Job, job *schema.Job,
metrics []string, metrics []string,
@@ -185,7 +184,7 @@ func GetStatistics(job *schema.Job) (map[string]schema.JobStatistics, error) {
return metaFile.Statistics, nil return metaFile.Statistics, nil
} }
// If the job is archived, find its `meta.json` file and override the Metadata // UpdateMetadata checks if the job is archived, finds its `meta.json` file, and overrides the Metadata
// in that JSON file. If the job is not archived, nothing is done. // in that JSON file. If the job is not archived, nothing is done.
func UpdateMetadata(job *schema.Job, metadata map[string]string) error { func UpdateMetadata(job *schema.Job, metadata map[string]string) error {
mutex.Lock() mutex.Lock()
@@ -206,7 +205,7 @@ func UpdateMetadata(job *schema.Job, metadata map[string]string) error {
return ar.StoreJobMeta(jobMeta) return ar.StoreJobMeta(jobMeta)
} }
// If the job is archived, find its `meta.json` file and override the tags list // UpdateTags checks if the job is archived, finds its `meta.json` file, and overrides the tags list
// in that JSON file. If the job is not archived, nothing is done. // in that JSON file. If the job is not archived, nothing is done.
func UpdateTags(job *schema.Job, tags []*schema.Tag) error { func UpdateTags(job *schema.Job, tags []*schema.Tag) error {
mutex.Lock() mutex.Lock()

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
import ( import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
import ( import (
@@ -336,7 +337,7 @@ func (fsa *FsArchive) Move(jobs []*schema.Job, path string) {
source := getDirectory(job, fsa.path) source := getDirectory(job, fsa.path)
target := getDirectory(job, path) target := getDirectory(job, path)
if err := os.MkdirAll(filepath.Clean(filepath.Join(target, "..")), 0777); err != nil { if err := os.MkdirAll(filepath.Clean(filepath.Join(target, "..")), 0o777); err != nil {
cclog.Errorf("JobArchive Move MkDir error: %v", err) cclog.Errorf("JobArchive Move MkDir error: %v", err)
} }
if err := os.Rename(source, target); err != nil { if err := os.Rename(source, target); err != nil {
@@ -395,7 +396,7 @@ func (fsa *FsArchive) CompressLast(starttime int64) int64 {
b, err := os.ReadFile(filename) b, err := os.ReadFile(filename)
if err != nil { if err != nil {
cclog.Errorf("fsBackend Compress - %v", err) cclog.Errorf("fsBackend Compress - %v", err)
os.WriteFile(filename, []byte(fmt.Sprintf("%d", starttime)), 0644) os.WriteFile(filename, fmt.Appendf(nil, "%d", starttime), 0o644)
return starttime return starttime
} }
last, err := strconv.ParseInt(strings.TrimSuffix(string(b), "\n"), 10, 64) last, err := strconv.ParseInt(strings.TrimSuffix(string(b), "\n"), 10, 64)
@@ -405,12 +406,12 @@ func (fsa *FsArchive) CompressLast(starttime int64) int64 {
} }
cclog.Infof("fsBackend Compress - start %d last %d", starttime, last) cclog.Infof("fsBackend Compress - start %d last %d", starttime, last)
os.WriteFile(filename, []byte(fmt.Sprintf("%d", starttime)), 0644) os.WriteFile(filename, fmt.Appendf(nil, "%d", starttime), 0o644)
return last return last
} }
func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) { func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
var isCompressed bool = true isCompressed := true
filename := getPath(job, fsa.path, "data.json.gz") filename := getPath(job, fsa.path, "data.json.gz")
if !util.CheckFileExists(filename) { if !util.CheckFileExists(filename) {
@@ -422,7 +423,7 @@ func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
} }
func (fsa *FsArchive) LoadJobStats(job *schema.Job) (schema.ScopedJobStats, error) { func (fsa *FsArchive) LoadJobStats(job *schema.Job) (schema.ScopedJobStats, error) {
var isCompressed bool = true isCompressed := true
filename := getPath(job, fsa.path, "data.json.gz") filename := getPath(job, fsa.path, "data.json.gz")
if !util.CheckFileExists(filename) { if !util.CheckFileExists(filename) {
@@ -495,7 +496,7 @@ func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
} }
if loadMetricData { if loadMetricData {
var isCompressed bool = true isCompressed := true
filename := filepath.Join(dirpath, startTimeDir.Name(), "data.json.gz") filename := filepath.Join(dirpath, startTimeDir.Name(), "data.json.gz")
if !util.CheckFileExists(filename) { if !util.CheckFileExists(filename) {
@@ -549,7 +550,7 @@ func (fsa *FsArchive) ImportJob(
jobData *schema.JobData, jobData *schema.JobData,
) error { ) error {
dir := getPath(jobMeta, fsa.path, "") dir := getPath(jobMeta, fsa.path, "")
if err := os.MkdirAll(dir, 0777); err != nil { if err := os.MkdirAll(dir, 0o777); err != nil {
cclog.Error("Error while creating job archive path") cclog.Error("Error while creating job archive path")
return err return err
} }

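Two mechanical changes recur in the fsBackend hunk above: fmt.Appendf(nil, ...) replaces []byte(fmt.Sprintf(...)) so the formatted bytes land directly in a byte slice, and file modes use the explicit 0o prefix (0o644 is the same value as 0644). A quick stand-alone check; the timestamp is an arbitrary example:

package main

import (
	"fmt"
	"os"
)

func main() {
	starttime := int64(1695800000) // arbitrary example value
	a := []byte(fmt.Sprintf("%d", starttime))
	b := fmt.Appendf(nil, "%d", starttime)
	fmt.Println(string(a) == string(b))                  // true, without the intermediate string
	fmt.Println(os.FileMode(0o644) == os.FileMode(0644)) // true: same mode, explicit octal spelling
}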
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
import ( import (
@@ -14,7 +15,7 @@ import (
) )
func DecodeJobData(r io.Reader, k string) (schema.JobData, error) { func DecodeJobData(r io.Reader, k string) (schema.JobData, error) {
data := cache.Get(k, func() (value interface{}, ttl time.Duration, size int) { data := cache.Get(k, func() (value any, ttl time.Duration, size int) {
var d schema.JobData var d schema.JobData
if err := json.NewDecoder(r).Decode(&d); err != nil { if err := json.NewDecoder(r).Decode(&d); err != nil {
cclog.Warn("Error while decoding raw job data json") cclog.Warn("Error while decoding raw job data json")

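The change above swaps interface{} for its alias any in the cache callback signature (Go 1.18+); the two spellings denote the identical type, so the edit is purely cosmetic. A compile-time demonstration with a stand-in for the cache call (not the real cache API):

package main

import "fmt"

// get is a stand-in for the cache.Get call above, not the real lrucache API.
func get(k string, factory func() (value any, ttl int, size int)) any {
	v, _, _ := factory()
	return v
}

// Identical types: a func written with interface{} is assignable where any is expected.
var legacy func() (value interface{}, ttl int, size int) = func() (interface{}, int, int) {
	return "job data", 60, 1
}

func main() {
	fmt.Println(get("k", legacy))
}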
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
import ( import (
@@ -79,8 +80,8 @@ type NLExprString string
func (nle NLExprString) consume(input string) (next string, ok bool) { func (nle NLExprString) consume(input string) (next string, ok bool) {
str := string(nle) str := string(nle)
if strings.HasPrefix(input, str) { if after, ok0 := strings.CutPrefix(input, str); ok0 {
return strings.TrimPrefix(input, str), true return after, true
} }
return "", false return "", false
} }

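strings.CutPrefix (Go 1.20+) folds the HasPrefix/TrimPrefix pair into a single call, which is what the consume() rewrite above uses. A tiny stand-alone check; the hostname strings are just examples:

package main

import (
	"fmt"
	"strings"
)

func consume(input, str string) (next string, ok bool) {
	if after, ok0 := strings.CutPrefix(input, str); ok0 {
		return after, true
	}
	return "", false
}

func main() {
	fmt.Println(consume("node0042", "node")) // "0042" true
	fmt.Println(consume("gpu0042", "node"))  // "" false
}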
View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend. // All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style // Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file. // license that can be found in the LICENSE file.
package archive package archive
type S3ArchiveConfig struct { type S3ArchiveConfig struct {