add only_metrics

brinkcoder 2025-03-05 01:07:26 +01:00
parent 9a2898a6a3
commit 2d792684ff
2 changed files with 55 additions and 60 deletions

View File

@@ -8,7 +8,7 @@ import (
 	"strings"
 	"time"
-	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
+	lp "github.com/ClusterCockpit/cc-lib/ccMessage"
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
 )
@@ -22,46 +22,41 @@ const LOADAVGFILE = "/proc/loadavg"
 type LoadavgCollector struct {
 	metricCollector
 	tags map[string]string
-	load_matches []string
-	load_skips   []bool
-	proc_matches []string
-	proc_skips   []bool
-	config       struct {
+	config struct {
 		ExcludeMetrics []string `json:"exclude_metrics,omitempty"`
+		OnlyMetrics    []string `json:"only_metrics,omitempty"`
 	}
 }
 
+func (m *LoadavgCollector) shouldOutput(metricName string) bool {
+	if len(m.config.OnlyMetrics) > 0 {
+		for _, n := range m.config.OnlyMetrics {
+			if n == metricName {
+				return true
+			}
+		}
+		return false
+	}
+	for _, n := range m.config.ExcludeMetrics {
+		if n == metricName {
+			return false
+		}
+	}
+	return true
+}
+
 func (m *LoadavgCollector) Init(config json.RawMessage) error {
 	m.name = "LoadavgCollector"
 	m.parallel = true
 	m.setup()
 	if len(config) > 0 {
-		err := json.Unmarshal(config, &m.config)
-		if err != nil {
+		if err := json.Unmarshal(config, &m.config); err != nil {
 			return err
 		}
 	}
-	m.meta = map[string]string{
-		"source": m.name,
-		"group": "LOAD"}
+	m.meta = map[string]string{"source": m.name, "group": "LOAD"}
 	m.tags = map[string]string{"type": "node"}
-	m.load_matches = []string{
-		"load_one",
-		"load_five",
-		"load_fifteen"}
-	m.load_skips = make([]bool, len(m.load_matches))
-	m.proc_matches = []string{
-		"proc_run",
-		"proc_total"}
-	m.proc_skips = make([]bool, len(m.proc_matches))
-	for i, name := range m.load_matches {
-		_, m.load_skips[i] = stringArrayContains(m.config.ExcludeMetrics, name)
-	}
-	for i, name := range m.proc_matches {
-		_, m.proc_skips[i] = stringArrayContains(m.config.ExcludeMetrics, name)
-	}
 	m.init = true
 	return nil
 }
@@ -72,50 +67,43 @@ func (m *LoadavgCollector) Read(interval time.Duration, output chan lp.CCMessage)
 	}
 	buffer, err := os.ReadFile(LOADAVGFILE)
 	if err != nil {
-		cclog.ComponentError(
-			m.name,
-			fmt.Sprintf("Read(): Failed to read file '%s': %v", LOADAVGFILE, err))
+		cclog.ComponentError(m.name, fmt.Sprintf("Read(): Failed to read file '%s': %v", LOADAVGFILE, err))
 		return
 	}
 	now := time.Now()
+	ls := strings.Split(string(buffer), " ")
 
 	// Load metrics
-	ls := strings.Split(string(buffer), ` `)
-	for i, name := range m.load_matches {
+	loadMetrics := []string{"load_one", "load_five", "load_fifteen"}
+	for i, name := range loadMetrics {
 		x, err := strconv.ParseFloat(ls[i], 64)
 		if err != nil {
-			cclog.ComponentError(
-				m.name,
-				fmt.Sprintf("Read(): Failed to convert '%s' to float64: %v", ls[i], err))
+			cclog.ComponentError(m.name, fmt.Sprintf("Read(): Failed to convert '%s' to float64: %v", ls[i], err))
 			continue
 		}
-		if m.load_skips[i] {
-			continue
-		}
-		y, err := lp.NewMessage(name, m.tags, m.meta, map[string]interface{}{"value": x}, now)
-		if err == nil {
-			output <- y
+		if m.shouldOutput(name) {
+			y, err := lp.NewMessage(name, m.tags, m.meta, map[string]interface{}{"value": x}, now)
+			if err == nil {
+				output <- y
+			}
 		}
 	}
 
 	// Process metrics
 	lv := strings.Split(ls[3], `/`)
-	for i, name := range m.proc_matches {
+	procMetrics := []string{"proc_run", "proc_total"}
+	for i, name := range procMetrics {
 		x, err := strconv.ParseInt(lv[i], 10, 64)
 		if err != nil {
-			cclog.ComponentError(
-				m.name,
-				fmt.Sprintf("Read(): Failed to convert '%s' to float64: %v", lv[i], err))
+			cclog.ComponentError(m.name, fmt.Sprintf("Read(): Failed to convert '%s' to int64: %v", lv[i], err))
			continue
 		}
-		if m.proc_skips[i] {
-			continue
+		if m.shouldOutput(name) {
+			y, err := lp.NewMessage(name, m.tags, m.meta, map[string]interface{}{"value": x}, now)
+			if err == nil {
+				output <- y
+			}
 		}
-		y, err := lp.NewMessage(name, m.tags, m.meta, map[string]interface{}{"value": x}, now)
-		if err == nil {
-			output <- y
-		}
 	}
 }
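
The filtering rule introduced above is compact but easy to misread, so here is a minimal, standalone sketch of how it resolves. The free-standing `shouldOutput` helper below is an illustration only, not part of the commit; it mirrors the new method and is applied to the collector's five metrics with the example configuration from the README hunk further down.

```go
package main

import "fmt"

// shouldOutput mirrors the filtering rule added by this commit: a non-empty
// only_metrics list wins; otherwise exclude_metrics is consulted.
func shouldOutput(name string, onlyMetrics, excludeMetrics []string) bool {
	if len(onlyMetrics) > 0 {
		for _, n := range onlyMetrics {
			if n == name {
				return true
			}
		}
		return false
	}
	for _, n := range excludeMetrics {
		if n == name {
			return false
		}
	}
	return true
}

func main() {
	metrics := []string{"load_one", "load_five", "load_fifteen", "proc_run", "proc_total"}
	// Values taken from the README example: exclude_metrics is ignored
	// while only_metrics is set.
	only := []string{"load_one", "proc_total"}
	exclude := []string{"proc_run"}

	for _, m := range metrics {
		fmt.Printf("%-13s emitted=%v\n", m, shouldOutput(m, only, exclude))
	}
}
```

With `only_metrics` set, only `load_one` and `proc_total` report `emitted=true`; `exclude_metrics` only takes effect once `only_metrics` is left empty.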

View File

@@ -1,19 +1,26 @@
 ## `loadavg` collector
 
 ```json
   "loadavg": {
     "exclude_metrics": [
       "proc_run"
+    ],
+    "only_metrics": [
+      "load_one",
+      "proc_total"
     ]
   }
 ```
 
-The `loadavg` collector reads data from `/proc/loadavg` and outputs a handful **node** metrics. If a metric is not required, it can be excluded from forwarding it to the sink.
+The `loadavg` collector reads data from `/proc/loadavg` and outputs a handful of **node** metrics.
+
+Both filtering mechanisms are supported:
+- `exclude_metrics`: Excludes the specified metrics.
+- `only_metrics`: If provided, only the listed metrics are collected. This takes precedence over `exclude_metrics`.
 
 Metrics:
-* `load_one`
-* `load_five`
-* `load_fifteen`
-* `proc_run`
-* `proc_total`
+- `load_one`
+- `load_five`
+- `load_fifteen`
+- `proc_run`
+- `proc_total`
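
The precedence described in the README could also be pinned down with a test. The sketch below is hypothetical and not part of this commit; the test name, the file placement next to the collector, and the `collectors` package name are assumptions.

```go
package collectors

import "testing"

// Hypothetical test exercising the only_metrics/exclude_metrics precedence
// of the new shouldOutput helper.
func TestLoadavgOnlyMetrics(t *testing.T) {
	var m LoadavgCollector

	// only_metrics takes precedence: exclude_metrics is ignored while it is set.
	m.config.OnlyMetrics = []string{"load_one", "proc_total"}
	m.config.ExcludeMetrics = []string{"load_one", "proc_run"}
	if !m.shouldOutput("load_one") {
		t.Error("load_one is listed in only_metrics and should be emitted")
	}
	if m.shouldOutput("load_five") {
		t.Error("load_five is not in only_metrics and should be filtered")
	}

	// Without only_metrics, exclude_metrics applies.
	m.config.OnlyMetrics = nil
	if m.shouldOutput("proc_run") {
		t.Error("proc_run is excluded and should be filtered")
	}
	if !m.shouldOutput("proc_total") {
		t.Error("proc_total is not excluded and should be emitted")
	}
}
```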