From 7ee85a07dc822715596035af5c00c807d45be1b2 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 18:28:32 +0100
Subject: [PATCH 01/18] Remove go-toolkit as build requirement for RPM builds
 if run in CI

---
 scripts/cc-metric-collector.spec | 2 ++
 1 file changed, 2 insertions(+)

diff --git a/scripts/cc-metric-collector.spec b/scripts/cc-metric-collector.spec
index 6a5e219..008f190 100644
--- a/scripts/cc-metric-collector.spec
+++ b/scripts/cc-metric-collector.spec
@@ -6,7 +6,9 @@ Summary:        Metric collection daemon from the ClusterCockpit suite
 License:        MIT
 Source0:        %{name}-%{version}.tar.gz
 
+%if "%{getenv:CI}" != "1"
 BuildRequires:  go-toolset
+%endif
 BuildRequires:  systemd-rpm-macros
 # for header downloads
 BuildRequires:  wget

From 3d70c8afc946b32366410246e3b1cf280f92c7ec Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 18:43:21 +0100
Subject: [PATCH 02/18] Remove condition around BuildRequires and use
 go-toolkit for RPM builds

---
 .github/workflows/Release.yml    | 36 +++++++++++++++++++-------------
 scripts/cc-metric-collector.spec |  2 --
 2 files changed, 22 insertions(+), 16 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index 219ea0d..0029f37 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -41,10 +41,12 @@ jobs:
         submodules: recursive
         fetch-depth: 0
 
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -110,10 +112,12 @@ jobs:
         submodules: recursive
         fetch-depth: 0
 
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -176,10 +180,12 @@ jobs:
         submodules: recursive
         fetch-depth: 0
 
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -211,7 +217,7 @@ jobs:
 
     # Use dnf to install development packages
     - name: Install development packages
-      run: dnf --assumeyes --disableplugin=subscription-manager install rpm-build go-srpm-macros gcc make python39 git wget openssl-devel diffutils delve
+      run: dnf --assumeyes --disableplugin=subscription-manager install rpm-build go-srpm-macros gcc make python39 git wget openssl-devel diffutils delve  go-toolkit
 
     # Checkout git repository and submodules
     # fetch-depth must be 0 to use git describe
@@ -223,10 +229,12 @@ jobs:
         fetch-depth: 0
 
     # See: https://github.com/marketplace/actions/setup-go-environment
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -310,7 +318,7 @@ jobs:
     - name: Install development packages
       run: |
           apt update && apt --assume-yes upgrade
-          apt --assume-yes install build-essential sed git wget bash
+          apt --assume-yes install build-essential sed git wget bash go
     # Checkout git repository and submodules
     # fetch-depth must be 0 to use git describe
     # See: https://github.com/marketplace/actions/checkout
diff --git a/scripts/cc-metric-collector.spec b/scripts/cc-metric-collector.spec
index 008f190..6a5e219 100644
--- a/scripts/cc-metric-collector.spec
+++ b/scripts/cc-metric-collector.spec
@@ -6,9 +6,7 @@ Summary:        Metric collection daemon from the ClusterCockpit suite
 License:        MIT
 Source0:        %{name}-%{version}.tar.gz
 
-%if "%{getenv:CI}" != "1"
 BuildRequires:  go-toolset
-%endif
 BuildRequires:  systemd-rpm-macros
 # for header downloads
 BuildRequires:  wget

From c01096c157f3711c9b4ecda3dcea12e6adfd6af8 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 18:49:28 +0100
Subject: [PATCH 03/18] use go-toolkit for RPM builds

---
 .github/workflows/runonce.yml | 32 ++++++++++++++++++++------------
 1 file changed, 20 insertions(+), 12 deletions(-)

diff --git a/.github/workflows/runonce.yml b/.github/workflows/runonce.yml
index 4d7b596..e4a27d6 100644
--- a/.github/workflows/runonce.yml
+++ b/.github/workflows/runonce.yml
@@ -115,10 +115,12 @@ jobs:
         fetch-depth: 0
 
     # See: https://github.com/marketplace/actions/setup-go-environment
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -153,10 +155,12 @@ jobs:
         fetch-depth: 0
 
     # See: https://github.com/marketplace/actions/setup-go-environment
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -189,10 +193,12 @@ jobs:
         fetch-depth: 0
 
     # See: https://github.com/marketplace/actions/setup-go-environment
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -224,10 +230,12 @@ jobs:
         fetch-depth: 0
 
     # See: https://github.com/marketplace/actions/setup-go-environment
+    # - name: Setup Golang
+    #   uses: actions/setup-go@v5
+    #   with:
+    #     go-version: 'stable'
     - name: Setup Golang
-      uses: actions/setup-go@v5
-      with:
-        go-version: 'stable'
+      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
 
     - name: RPM build MetricCollector
       id: rpmbuild

From d881093524f82b7c62590dbc0c719a304db4391a Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 20:12:03 +0100
Subject: [PATCH 04/18] Install go-toolkit to fulfill build requirements for
 RPM

---
 .github/workflows/Release.yml | 28 ++++++++++++++++++++++++----
 .github/workflows/runonce.yml | 28 ++++++++++++++++++++++++----
 2 files changed, 48 insertions(+), 8 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index 0029f37..b08f703 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -46,7 +46,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/go-toolset-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-bin-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-src-1.22.9-1.module_el8.10.0+3938+8c723e16.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -117,7 +122,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -185,7 +195,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/go-toolset-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-bin-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-src-1.22.9-1.module_el8.10.0+3938+8c723e16.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -234,7 +249,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
diff --git a/.github/workflows/runonce.yml b/.github/workflows/runonce.yml
index e4a27d6..869e294 100644
--- a/.github/workflows/runonce.yml
+++ b/.github/workflows/runonce.yml
@@ -120,7 +120,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/go-toolset-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-bin-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-src-1.22.9-1.module_el8.10.0+3938+8c723e16.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -160,7 +165,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -198,7 +208,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/go-toolset-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-bin-1.22.9-1.module_el8.10.0+3938+8c723e16.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/8/AppStream/x86_64/os/Packages/golang-src-1.22.9-1.module_el8.10.0+3938+8c723e16.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -235,7 +250,12 @@ jobs:
     #   with:
     #     go-version: 'stable'
     - name: Setup Golang
-      run: dnf --assumeyes --disableplugin=subscription-manager install go-toolkit
+      run: |
+          dnf --assumeyes --disableplugin=subscription-manager install \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild

From 7e6870c7b3b907dbe62ec83d9d87c6021f51b3da Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 20:15:59 +0100
Subject: [PATCH 05/18] Add golang-race for UBI9 and Alma9

---
 .github/workflows/Release.yml | 6 ++++--
 .github/workflows/runonce.yml | 6 ++++--
 2 files changed, 8 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index b08f703..4d65cb6 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -127,7 +127,8 @@ jobs:
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
-              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-race-1.22.7-2.el9_5.x86_64.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -254,7 +255,8 @@ jobs:
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
-              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-race-1.22.7-2.el9_5.x86_64.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
diff --git a/.github/workflows/runonce.yml b/.github/workflows/runonce.yml
index 869e294..ffa6c19 100644
--- a/.github/workflows/runonce.yml
+++ b/.github/workflows/runonce.yml
@@ -170,7 +170,8 @@ jobs:
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
-              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-race-1.22.7-2.el9_5.x86_64.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild
@@ -255,7 +256,8 @@ jobs:
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/go-toolset-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-1.22.7-2.el9_5.x86_64.rpm \
               https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-bin-1.22.7-2.el9_5.x86_64.rpm \
-              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-src-1.22.7-2.el9_5.noarch.rpm \
+              https://repo.almalinux.org/almalinux/9/AppStream/x86_64/os/Packages/golang-race-1.22.7-2.el9_5.x86_64.rpm
 
     - name: RPM build MetricCollector
       id: rpmbuild

From 1f35f6d3ca1b6c8826d914b20065ee44ffd5b34e Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 20:26:38 +0100
Subject: [PATCH 06/18] Fix wrongly named packages

---
 .github/workflows/Release.yml | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index 4d65cb6..21b06e5 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -233,7 +233,7 @@ jobs:
 
     # Use dnf to install development packages
     - name: Install development packages
-      run: dnf --assumeyes --disableplugin=subscription-manager install rpm-build go-srpm-macros gcc make python39 git wget openssl-devel diffutils delve  go-toolkit
+      run: dnf --assumeyes --disableplugin=subscription-manager install rpm-build go-srpm-macros gcc make python39 git wget openssl-devel diffutils delve 
 
     # Checkout git repository and submodules
     # fetch-depth must be 0 to use git describe
@@ -340,7 +340,7 @@ jobs:
     - name: Install development packages
       run: |
           apt update && apt --assume-yes upgrade
-          apt --assume-yes install build-essential sed git wget bash go
+          apt --assume-yes install build-essential sed git wget bash
     # Checkout git repository and submodules
     # fetch-depth must be 0 to use git describe
     # See: https://github.com/marketplace/actions/checkout

From e968aa19912a41fc1e802e8ed77b7abdf5244024 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 20:33:10 +0100
Subject: [PATCH 07/18] Fix wrongly named packages

---
 .github/workflows/Release.yml | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index f2436e5..21b06e5 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -340,7 +340,7 @@ jobs:
     - name: Install development packages
       run: |
           apt update && apt --assume-yes upgrade
-          apt --assume-yes install build-essential sed git wget bash go
+          apt --assume-yes install build-essential sed git wget bash
     # Checkout git repository and submodules
     # fetch-depth must be 0 to use git describe
     # See: https://github.com/marketplace/actions/checkout

From 94c80307e8009c32caf4423895159289b14fca12 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 21:03:03 +0100
Subject: [PATCH 08/18] Fix Release part

---
 .github/workflows/Release.yml | 12 ++++++++----
 1 file changed, 8 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index cb02a1a..8709786 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -73,8 +73,8 @@ jobs:
         NEW_SRPM=${OLD_SRPM/el8/alma8}
         mv "${OLD_RPM}" "${NEW_RPM}"
         mv "${OLD_SRPM}" "${NEW_SRPM}"
-        echo "EL8_SRPM=${NEW_SRPM}" >> $GITHUB_OUTPUT
-        echo "EL8_RPM=${NEW_RPM}" >> $GITHUB_OUTPUT
+        echo "SRPM=${NEW_SRPM}" >> $GITHUB_OUTPUT
+        echo "RPM=${NEW_RPM}" >> $GITHUB_OUTPUT
 
     # See: https://github.com/actions/upload-artifact
     - name: Save RPM as artifact
@@ -152,8 +152,8 @@ jobs:
         NEW_SRPM=${OLD_SRPM/el9/alma9}
         mv "${OLD_RPM}" "${NEW_RPM}"
         mv "${OLD_SRPM}" "${NEW_SRPM}"
-        echo "EL9_SRPM=${NEW_SRPM}" >> $GITHUB_OUTPUT
-        echo "EL9_RPM=${NEW_RPM}" >> $GITHUB_OUTPUT
+        echo "SRPM=${NEW_SRPM}" >> $GITHUB_OUTPUT
+        echo "RPM=${NEW_RPM}" >> $GITHUB_OUTPUT
 
     # See: https://github.com/actions/upload-artifact
     - name: Save RPM as artifact
@@ -235,6 +235,10 @@ jobs:
     # See: https://catalog.redhat.com/software/containers/ubi8/ubi/5c359854d70cc534b3a3784e?container-tabs=gti
     container: redhat/ubi9
     # The job outputs link to the outputs of the 'rpmbuild' step
+    # The job outputs link to the outputs of the 'rpmbuild' step
+    outputs:
+      rpm : ${{steps.rpmbuild.outputs.RPM}}
+      srpm : ${{steps.rpmbuild.outputs.SRPM}}
     steps:
 
     # Use dnf to install development packages

From ee4e1baf5bab9a3db8ca0b1eada15223b38b5948 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Fri, 20 Dec 2024 21:07:33 +0100
Subject: [PATCH 09/18] Fix Release part

---
 .github/workflows/Release.yml | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/.github/workflows/Release.yml b/.github/workflows/Release.yml
index 8709786..a2dfff4 100644
--- a/.github/workflows/Release.yml
+++ b/.github/workflows/Release.yml
@@ -81,13 +81,13 @@ jobs:
       uses: actions/upload-artifact@v4
       with:
         name: cc-metric-collector RPM for AlmaLinux 8
-        path: ${{ steps.rpmrename.outputs.EL8_RPM }}
+        path: ${{ steps.rpmrename.outputs.RPM }}
         overwrite: true
     - name: Save SRPM as artifact
       uses: actions/upload-artifact@v4
       with:
         name: cc-metric-collector SRPM for AlmaLinux 8
-        path: ${{ steps.rpmrename.outputs.EL8_SRPM }}
+        path: ${{ steps.rpmrename.outputs.SRPM }}
         overwrite: true
 
   #
@@ -160,13 +160,13 @@ jobs:
       uses: actions/upload-artifact@v4
       with:
         name: cc-metric-collector RPM for AlmaLinux 9
-        path: ${{ steps.rpmrename.outputs.EL9_RPM }}
+        path: ${{ steps.rpmrename.outputs.RPM }}
         overwrite: true
     - name: Save SRPM as artifact
       uses: actions/upload-artifact@v4
       with:
         name: cc-metric-collector SRPM for AlmaLinux 9
-        path: ${{ steps.rpmrename.outputs.EL9_SRPM }}
+        path: ${{ steps.rpmrename.outputs.SRPM }}
         overwrite: true
 
   #

From bcecdd033b52769c07421fd05d2a15b540b37826 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Mon, 23 Dec 2024 17:51:43 +0100
Subject: [PATCH 10/18] Fix documentation of RAPL collector

---
 collectors/raplMetric.md | 7 ++-----
 1 file changed, 2 insertions(+), 5 deletions(-)

diff --git a/collectors/raplMetric.md b/collectors/raplMetric.md
index f857d7c..8eb792f 100644
--- a/collectors/raplMetric.md
+++ b/collectors/raplMetric.md
@@ -1,11 +1,9 @@
-# Running average power limit (RAPL) metric collector
+## `rapl` collector
 
 This collector reads running average power limit (RAPL) monitoring attributes to compute average power consumption metrics. See <https://www.kernel.org/doc/html/latest/power/powercap/powercap.html#monitoring-attributes>.
 
 The Likwid metric collector provides similar functionality.
 
-## Configuration
-
 ```json
   "rapl": {
     "exclude_device_by_id": ["0:1", "0:2"],
@@ -13,6 +11,5 @@ The Likwid metric collector provides similar functionality.
   }
 ```
 
-## Metrics
-
+Metrics:
 * `rapl_average_power`: average power consumption in Watt. The average is computed over the entire runtime from the last measurement to the current measurement

From e02a018327a14bd277ea4436a6d66ad0fe768c32 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Mon, 23 Dec 2024 17:52:34 +0100
Subject: [PATCH 11/18] Mark all JSON config fields of message processor as
 omitempty

---
 pkg/messageProcessor/messageProcessor.go | 42 ++++++++++++------------
 1 file changed, 21 insertions(+), 21 deletions(-)

diff --git a/pkg/messageProcessor/messageProcessor.go b/pkg/messageProcessor/messageProcessor.go
index 9bcc54a..6163fa8 100644
--- a/pkg/messageProcessor/messageProcessor.go
+++ b/pkg/messageProcessor/messageProcessor.go
@@ -22,27 +22,27 @@ type messageProcessorTagConfig struct {
 }
 
 type messageProcessorConfig struct {
-	StageOrder       []string                    `json:"stage_order,omitempty"`        // List of stages to execute them in the specified order and to skip unrequired ones
-	DropMessages     []string                    `json:"drop_messages,omitempty"`      // List of metric names to drop. For fine-grained dropping use drop_messages_if
-	DropMessagesIf   []string                    `json:"drop_messages_if,omitempty"`   // List of evaluatable terms to drop messages
-	RenameMessages   map[string]string           `json:"rename_messages,omitempty"`    // Map of metric names to rename
-	RenameMessagesIf map[string]string           `json:"rename_messages_if,omitempty"` // Map to rename metric name based on a condition
-	NormalizeUnits   bool                        `json:"normalize_units,omitempty"`    // Check unit meta flag and normalize it using cc-units
-	ChangeUnitPrefix map[string]string           `json:"change_unit_prefix,omitempty"` // Add prefix that should be applied to the messages
-	AddTagsIf        []messageProcessorTagConfig `json:"add_tags_if"`                  // List of tags that are added when the condition is met
-	DelTagsIf        []messageProcessorTagConfig `json:"delete_tags_if"`               // List of tags that are removed when the condition is met
-	AddMetaIf        []messageProcessorTagConfig `json:"add_meta_if"`                  // List of meta infos that are added when the condition is met
-	DelMetaIf        []messageProcessorTagConfig `json:"delete_meta_if"`               // List of meta infos that are removed when the condition is met
-	AddFieldIf       []messageProcessorTagConfig `json:"add_field_if"`                 // List of fields that are added when the condition is met
-	DelFieldIf       []messageProcessorTagConfig `json:"delete_field_if"`              // List of fields that are removed when the condition is met
-	DropByType       []string                    `json:"drop_by_message_type"`         // List of message types that should be dropped
-	MoveTagToMeta    []messageProcessorTagConfig `json:"move_tag_to_meta_if"`
-	MoveTagToField   []messageProcessorTagConfig `json:"move_tag_to_field_if"`
-	MoveMetaToTag    []messageProcessorTagConfig `json:"move_meta_to_tag_if"`
-	MoveMetaToField  []messageProcessorTagConfig `json:"move_meta_to_field_if"`
-	MoveFieldToTag   []messageProcessorTagConfig `json:"move_field_to_tag_if"`
-	MoveFieldToMeta  []messageProcessorTagConfig `json:"move_field_to_meta_if"`
-	AddBaseEnv       map[string]interface{}      `json:"add_base_env"`
+	StageOrder       []string                    `json:"stage_order,omitempty"`          // List of stages to execute them in the specified order and to skip unrequired ones
+	DropMessages     []string                    `json:"drop_messages,omitempty"`        // List of metric names to drop. For fine-grained dropping use drop_messages_if
+	DropMessagesIf   []string                    `json:"drop_messages_if,omitempty"`     // List of evaluatable terms to drop messages
+	RenameMessages   map[string]string           `json:"rename_messages,omitempty"`      // Map of metric names to rename
+	RenameMessagesIf map[string]string           `json:"rename_messages_if,omitempty"`   // Map to rename metric name based on a condition
+	NormalizeUnits   bool                        `json:"normalize_units,omitempty"`      // Check unit meta flag and normalize it using cc-units
+	ChangeUnitPrefix map[string]string           `json:"change_unit_prefix,omitempty"`   // Add prefix that should be applied to the messages
+	AddTagsIf        []messageProcessorTagConfig `json:"add_tags_if,omitempty"`          // List of tags that are added when the condition is met
+	DelTagsIf        []messageProcessorTagConfig `json:"delete_tags_if,omitempty"`       // List of tags that are removed when the condition is met
+	AddMetaIf        []messageProcessorTagConfig `json:"add_meta_if,omitempty"`          // List of meta infos that are added when the condition is met
+	DelMetaIf        []messageProcessorTagConfig `json:"delete_meta_if,omitempty"`       // List of meta infos that are removed when the condition is met
+	AddFieldIf       []messageProcessorTagConfig `json:"add_field_if,omitempty"`         // List of fields that are added when the condition is met
+	DelFieldIf       []messageProcessorTagConfig `json:"delete_field_if,omitempty"`      // List of fields that are removed when the condition is met
+	DropByType       []string                    `json:"drop_by_message_type,omitempty"` // List of message types that should be dropped
+	MoveTagToMeta    []messageProcessorTagConfig `json:"move_tag_to_meta_if,omitempty"`
+	MoveTagToField   []messageProcessorTagConfig `json:"move_tag_to_field_if,omitempty"`
+	MoveMetaToTag    []messageProcessorTagConfig `json:"move_meta_to_tag_if,omitempty"`
+	MoveMetaToField  []messageProcessorTagConfig `json:"move_meta_to_field_if,omitempty"`
+	MoveFieldToTag   []messageProcessorTagConfig `json:"move_field_to_tag_if,omitempty"`
+	MoveFieldToMeta  []messageProcessorTagConfig `json:"move_field_to_meta_if,omitempty"`
+	AddBaseEnv       map[string]interface{}      `json:"add_base_env,omitempty"`
 }
 
 type messageProcessor struct {

From 70a6afc549209b58bd0b188509c287be124cab00 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <Thomas.Roehl@googlemail.com>
Date: Mon, 23 Dec 2024 17:55:48 +0100
Subject: [PATCH 12/18] Generate HUGO inputs out of Markdown files

---
 scripts/generate_docs.sh | 175 +++++++++++++++++++++++++++++++++++++++
 1 file changed, 175 insertions(+)
 create mode 100755 scripts/generate_docs.sh

diff --git a/scripts/generate_docs.sh b/scripts/generate_docs.sh
new file mode 100755
index 0000000..5f4a8c9
--- /dev/null
+++ b/scripts/generate_docs.sh
@@ -0,0 +1,175 @@
+#!/bin/bash -l
+
+SRCDIR="$(pwd)"
+DESTDIR="$1"
+
+if [ -z "$DESTDIR" ]; then
+    echo "Destination folder not provided"
+    exit 1
+fi
+
+
+COLLECTORS=$(find "${SRCDIR}/collectors" -name "*Metric.md")
+SINKS=$(find "${SRCDIR}/sinks"  -name "*Sink.md")
+RECEIVERS=$(find "${SRCDIR}/receivers"  -name "*Receiver.md")
+
+
+
+# Collectors
+mkdir -p "${DESTDIR}/collectors"
+for F in $COLLECTORS; do
+    echo "$F"
+    FNAME=$(basename "$F")
+    TITLE=$(grep -E "^##" "$F" | head -n 1 | sed -e 's+## ++g')
+    echo "'${TITLE//\`/}'"
+    if [ "${TITLE}" == "" ]; then continue; fi
+    rm --force "${DESTDIR}/collectors/${FNAME}"
+    cat << EOF >> "${DESTDIR}/collectors/${FNAME}"
+---
+title: ${TITLE//\`/}
+description: >
+  Toplevel ${FNAME/.md/}
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Collector, ${FNAME/Metric.md/}]
+weight: 2
+---
+
+EOF
+    cat "$F" >> "${DESTDIR}/collectors/${FNAME}"
+done
+
+if [ -e "${SRCDIR}/collectors/README.md" ]; then
+    cat << EOF > "${DESTDIR}/collectors/_index.md"
+---
+title: cc-metric-collector's collectors
+description: Documentation of cc-metric-collector's collectors
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Collector, General]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/collectors/README.md" >> "${DESTDIR}/collectors/_index.md"
+fi
+
+# Sinks
+mkdir -p "${DESTDIR}/sinks"
+for F in $SINKS; do
+    echo "$F"
+    FNAME=$(basename "$F")
+    TITLE=$(grep -E "^##" "$F" | head -n 1 | sed -e 's+## ++g')
+    echo "'${TITLE//\`/}'"
+    if [ "${TITLE}" == "" ]; then continue; fi
+    rm --force "${DESTDIR}/sinks/${FNAME}"
+    cat << EOF >> "${DESTDIR}/sinks/${FNAME}"
+---
+title: ${TITLE//\`/}
+description: >
+  Toplevel ${FNAME/.md/}
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Sink, ${FNAME/Sink.md/}]
+weight: 2
+---
+
+EOF
+    cat "$F" >> "${DESTDIR}/sinks/${FNAME}"
+done
+
+if [ -e "${SRCDIR}/collectors/README.md" ]; then
+    cat << EOF > "${DESTDIR}/sinks/_index.md"
+---
+title: cc-metric-collector's sinks
+description: Documentation of cc-metric-collector's sinks
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Sink, General]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/sinks/README.md" >> "${DESTDIR}/sinks/_index.md"
+fi
+
+
+# Receivers
+mkdir -p "${DESTDIR}/receivers"
+for F in $RECEIVERS; do
+    echo "$F"
+    FNAME=$(basename "$F")
+    TITLE=$(grep -E "^##" "$F" | head -n 1 | sed -e 's+## ++g')
+    echo "'${TITLE//\`/}'"
+    if [ "${TITLE}" == "" ]; then continue; fi
+    rm --force "${DESTDIR}/receivers/${FNAME}"
+    cat << EOF >> "${DESTDIR}/receivers/${FNAME}"
+---
+title: ${TITLE//\`/}
+description: >
+  Toplevel ${FNAME/.md/}
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Receiver, ${FNAME/Receiver.md/}]
+weight: 2
+---
+
+EOF
+    cat "$F" >> "${DESTDIR}/receivers/${FNAME}"
+done
+
+if [ -e "${SRCDIR}/receivers/README.md" ]; then
+    cat << EOF > "${DESTDIR}/receivers/_index.md"
+---
+title: cc-metric-collector's receivers
+description: Documentation of cc-metric-collector's receivers
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Receiver, General]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/receivers/README.md" >> "${DESTDIR}/receivers/_index.md"
+fi
+
+mkdir -p "${DESTDIR}/internal/metricRouter"
+if [ -e "${SRCDIR}/internal/metricRouter/README.md" ]; then
+    cat << EOF > "${DESTDIR}/internal/metricRouter/_index.md"
+---
+title: cc-metric-collector's router
+description: Documentation of cc-metric-collector's router
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Router, General]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/internal/metricRouter/README.md" >> "${DESTDIR}/internal/metricRouter/_index.md"
+fi
+
+if [ -e "${SRCDIR}/README.md" ]; then
+    cat << EOF > "${DESTDIR}/_index.md"
+---
+title: cc-metric-collector
+description: Documentation of cc-metric-collector
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, General]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/README.md" >> "${DESTDIR}/_index.md"
+    sed -i -e 's+README.md+_index.md+g' "${DESTDIR}/_index.md"
+fi
+
+
+mkdir -p "${DESTDIR}/pkg/messageProcessor"
+if [ -e "${SRCDIR}/pkg/messageProcessor/README.md" ]; then
+    cat << EOF > "${DESTDIR}/pkg/messageProcessor/_index.md"
+---
+title: cc-metric-collector's message processor
+description: Documentation of cc-metric-collector's message processor
+categories: [cc-metric-collector]
+tags: [cc-metric-collector, Message Processor]
+weight: 40
+---
+
+EOF
+    cat "${SRCDIR}/pkg/messageProcessor/README.md" >> "${DESTDIR}/pkg/messageProcessor/_index.md"
+fi
+

From 7d3180b526bdba11f17f19d14dfc9fa7fba0efdc Mon Sep 17 00:00:00 2001
From: Thomas Roehl <thomas.roehl@fau.de>
Date: Fri, 27 Dec 2024 15:00:14 +0000
Subject: [PATCH 13/18] Check creation of CCMessage in NATS receiver

---
 receivers/natsReceiver.go | 11 ++++++-----
 1 file changed, 6 insertions(+), 5 deletions(-)

diff --git a/receivers/natsReceiver.go b/receivers/natsReceiver.go
index ffb6dab..4f9f552 100644
--- a/receivers/natsReceiver.go
+++ b/receivers/natsReceiver.go
@@ -91,17 +91,18 @@ func (r *NatsReceiver) _NatsReceive(m *nats.Msg) {
 				return
 			}
 
-			y, _ := lp.NewMessage(
+			y, err := lp.NewMessage(
 				string(measurement),
 				tags,
 				nil,
 				fields,
 				t,
 			)
-
-			m, err := r.mp.ProcessMessage(y)
-			if err == nil && m != nil {
-				r.sink <- m
+			if err == nil {
+				m, err := r.mp.ProcessMessage(y)
+				if err == nil && m != nil && r.sink != nil {
+					r.sink <- m
+				}
 			}
 		}
 	}

From 7b343d0bab7082f5e37b95700f05d5782ebca9e6 Mon Sep 17 00:00:00 2001
From: Thomas Roehl <thomas.roehl@fau.de>
Date: Fri, 27 Dec 2024 15:22:59 +0000
Subject: [PATCH 14/18] Use CCMessage FromBytes instead of Influx's decoder

---
 receivers/httpReceiver.go | 77 +++++---------------------------------
 receivers/natsReceiver.go | 78 ++++++---------------------------------
 2 files changed, 21 insertions(+), 134 deletions(-)

diff --git a/receivers/httpReceiver.go b/receivers/httpReceiver.go
index d7965c6..ae6d87b 100644
--- a/receivers/httpReceiver.go
+++ b/receivers/httpReceiver.go
@@ -13,7 +13,6 @@ import (
 	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
 	mp "github.com/ClusterCockpit/cc-metric-collector/pkg/messageProcessor"
-	influx "github.com/influxdata/line-protocol/v2/lineprotocol"
 )
 
 const HTTP_RECEIVER_PORT = "8080"
@@ -151,80 +150,22 @@ func (r *HttpReceiver) ServerHttp(w http.ResponseWriter, req *http.Request) {
 		}
 	}
 	if r.sink != nil {
-		d := influx.NewDecoder(req.Body)
-		for d.Next() {
-
-			// Decode measurement name
-			measurement, err := d.Measurement()
+		buf := make([]byte, req.ContentLength)
+		n, err := req.Body.Read(buf)
+		if err == nil && n > 0 {
+			messages, err := lp.FromBytes(buf[:n])
 			if err != nil {
-				msg := "ServerHttp: Failed to decode measurement: " + err.Error()
+				msg := "ServerHttp: Failed to decode messages: " + err.Error()
 				cclog.ComponentError(r.name, msg)
 				http.Error(w, msg, http.StatusInternalServerError)
 				return
 			}
-
-			// Decode tags
-			tags := make(map[string]string)
-			for {
-				key, value, err := d.NextTag()
-				if err != nil {
-					msg := "ServerHttp: Failed to decode tag: " + err.Error()
-					cclog.ComponentError(r.name, msg)
-					http.Error(w, msg, http.StatusInternalServerError)
-					return
+			for _, y := range messages {
+				m, err := r.mp.ProcessMessage(y)
+				if err == nil && m != nil {
+					r.sink <- m
 				}
-				if key == nil {
-					break
-				}
-				tags[string(key)] = string(value)
 			}
-
-			// Decode fields
-			fields := make(map[string]interface{})
-			for {
-				key, value, err := d.NextField()
-				if err != nil {
-					msg := "ServerHttp: Failed to decode field: " + err.Error()
-					cclog.ComponentError(r.name, msg)
-					http.Error(w, msg, http.StatusInternalServerError)
-					return
-				}
-				if key == nil {
-					break
-				}
-				fields[string(key)] = value.Interface()
-			}
-
-			// Decode time stamp
-			t, err := d.Time(influx.Nanosecond, time.Time{})
-			if err != nil {
-				msg := "ServerHttp: Failed to decode time stamp: " + err.Error()
-				cclog.ComponentError(r.name, msg)
-				http.Error(w, msg, http.StatusInternalServerError)
-				return
-			}
-
-			y, _ := lp.NewMessage(
-				string(measurement),
-				tags,
-				nil,
-				fields,
-				t,
-			)
-
-			m, err := r.mp.ProcessMessage(y)
-			if err == nil && m != nil {
-				r.sink <- m
-			}
-
-		}
-		// Check for IO errors
-		err := d.Err()
-		if err != nil {
-			msg := "ServerHttp: Failed to decode: " + err.Error()
-			cclog.ComponentError(r.name, msg)
-			http.Error(w, msg, http.StatusInternalServerError)
-			return
 		}
 	}
 
diff --git a/receivers/natsReceiver.go b/receivers/natsReceiver.go
index 4f9f552..50072ec 100644
--- a/receivers/natsReceiver.go
+++ b/receivers/natsReceiver.go
@@ -5,20 +5,18 @@ import (
 	"errors"
 	"fmt"
 	"os"
-	"time"
 
 	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
 	mp "github.com/ClusterCockpit/cc-metric-collector/pkg/messageProcessor"
-	influx "github.com/influxdata/line-protocol/v2/lineprotocol"
 	nats "github.com/nats-io/nats.go"
 )
 
 type NatsReceiverConfig struct {
 	defaultReceiverConfig
-	Addr    string `json:"address"`
-	Port    string `json:"port"`
-	Subject string `json:"subject"`
+	Addr     string `json:"address"`
+	Port     string `json:"port"`
+	Subject  string `json:"subject"`
 	User     string `json:"user,omitempty"`
 	Password string `json:"password,omitempty"`
 	NkeyFile string `json:"nkey_file,omitempty"`
@@ -42,67 +40,15 @@ func (r *NatsReceiver) Start() {
 func (r *NatsReceiver) _NatsReceive(m *nats.Msg) {
 
 	if r.sink != nil {
-		d := influx.NewDecoderWithBytes(m.Data)
-		for d.Next() {
-
-			// Decode measurement name
-			measurement, err := d.Measurement()
-			if err != nil {
-				msg := "_NatsReceive: Failed to decode measurement: " + err.Error()
-				cclog.ComponentError(r.name, msg)
-				return
-			}
-
-			// Decode tags
-			tags := make(map[string]string)
-			for {
-				key, value, err := d.NextTag()
-				if err != nil {
-					msg := "_NatsReceive: Failed to decode tag: " + err.Error()
-					cclog.ComponentError(r.name, msg)
-					return
-				}
-				if key == nil {
-					break
-				}
-				tags[string(key)] = string(value)
-			}
-
-			// Decode fields
-			fields := make(map[string]interface{})
-			for {
-				key, value, err := d.NextField()
-				if err != nil {
-					msg := "_NatsReceive: Failed to decode field: " + err.Error()
-					cclog.ComponentError(r.name, msg)
-					return
-				}
-				if key == nil {
-					break
-				}
-				fields[string(key)] = value.Interface()
-			}
-
-			// Decode time stamp
-			t, err := d.Time(influx.Nanosecond, time.Time{})
-			if err != nil {
-				msg := "_NatsReceive: Failed to decode time: " + err.Error()
-				cclog.ComponentError(r.name, msg)
-				return
-			}
-
-			y, err := lp.NewMessage(
-				string(measurement),
-				tags,
-				nil,
-				fields,
-				t,
-			)
-			if err == nil {
-				m, err := r.mp.ProcessMessage(y)
-				if err == nil && m != nil && r.sink != nil {
-					r.sink <- m
-				}
+		messages, err := lp.FromBytes(m.Data)
+		if err != nil {
+			msg := "_NatsReceive: Failed to decode messages: " + err.Error()
+			cclog.ComponentError(r.name, msg)
+		}
+		for _, y := range messages {
+			m, err := r.mp.ProcessMessage(y)
+			if err == nil && m != nil && r.sink != nil {
+				r.sink <- m
 			}
 		}
 	}

From 6901b06e44ce2a9f384d9b12a550bb74be855d5f Mon Sep 17 00:00:00 2001
From: Michael Panzlaff <michael.panzlaff@fau.de>
Date: Mon, 3 Feb 2025 15:23:51 +0100
Subject: [PATCH 15/18] Rename 'process_message' to 'process_messages' in
 metricRouter config

This makes the behavior more consistent with the other modules, which
have their MessageProcessor named 'process_messages'. This most likely
was just a typo.
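
A minimal sketch of a metric router configuration using the renamed key (the neighbouring keys are taken from the struct in the diff below; the metric name and unit prefix are hypothetical placeholders):

```json
{
  "normalize_units": true,
  "change_unit_prefix": {
    "mem_used": "G"
  },
  "process_messages": {
    "drop_messages": ["example_metric"]
  }
}
```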
---
 internal/metricRouter/metricRouter.go | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/internal/metricRouter/metricRouter.go b/internal/metricRouter/metricRouter.go
index e30e436..ed08ccb 100644
--- a/internal/metricRouter/metricRouter.go
+++ b/internal/metricRouter/metricRouter.go
@@ -40,7 +40,7 @@ type metricRouterConfig struct {
 	NormalizeUnits    bool                                 `json:"normalize_units"`     // Check unit meta flag and normalize it using cc-units
 	ChangeUnitPrefix  map[string]string                    `json:"change_unit_prefix"`  // Add prefix that should be applied to the metrics
 	// dropMetrics       map[string]bool                      // Internal map for O(1) lookup
-	MessageProcessor json.RawMessage `json:"process_message,omitempty"`
+	MessageProcessor json.RawMessage `json:"process_messages,omitempty"`
 }
 
 // Metric router data structure

From 0f92f10b666f844ce21636ffee67ca06ba437ceb Mon Sep 17 00:00:00 2001
From: brinkcoder <Robert.Externbrink@ruhr-uni-bochum.de>
Date: Wed, 19 Feb 2025 11:32:15 +0100
Subject: [PATCH 16/18] Add optional interface alias in netstat (#130)

* Check creation of CCMessage in NATS receiver

* add optional interface aliases for netstatMetric

* small fix

---------

Co-authored-by: Thomas Roehl <thomas.roehl@fau.de>
Co-authored-by: exterr2f <Robert.Externbrink@rub.de>
Co-authored-by: Thomas Gruber <Thomas.Roehl@googlemail.com>
---
 collectors/netstatMetric.go | 56 ++++++++++++++++++++++++++-----------
 collectors/netstatMetric.md | 16 +++++++----
 2 files changed, 49 insertions(+), 23 deletions(-)

diff --git a/collectors/netstatMetric.go b/collectors/netstatMetric.go
index 7933e53..4cff5d8 100644
--- a/collectors/netstatMetric.go
+++ b/collectors/netstatMetric.go
@@ -10,15 +10,16 @@ import (
 	"time"
 
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
-	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
+	lp "github.com/ClusterCockpit/cc-lib/ccMessage"
 )
 
 const NETSTATFILE = "/proc/net/dev"
 
 type NetstatCollectorConfig struct {
-	IncludeDevices     []string `json:"include_devices"`
-	SendAbsoluteValues bool     `json:"send_abs_values"`
-	SendDerivedValues  bool     `json:"send_derived_values"`
+	IncludeDevices     []string              `json:"include_devices"`
+	SendAbsoluteValues bool                  `json:"send_abs_values"`
+	SendDerivedValues  bool                  `json:"send_derived_values"`
+	InterfaceAliases   map[string][]string   `json:"interface_aliases,omitempty"`
 }
 
 type NetstatCollectorMetric struct {
@@ -32,9 +33,26 @@ type NetstatCollectorMetric struct {
 
 type NetstatCollector struct {
 	metricCollector
-	config        NetstatCollectorConfig
-	matches       map[string][]NetstatCollectorMetric
-	lastTimestamp time.Time
+	config           NetstatCollectorConfig
+	aliasToCanonical map[string]string
+	matches          map[string][]NetstatCollectorMetric
+	lastTimestamp    time.Time
+}
+
+func (m *NetstatCollector) buildAliasMapping() {
+	m.aliasToCanonical = make(map[string]string)
+	for canon, aliases := range m.config.InterfaceAliases {
+		for _, alias := range aliases {
+			m.aliasToCanonical[alias] = canon
+		}
+	}
+}
+
+func getCanonicalName(raw string, aliasToCanonical map[string]string) string {
+	if canon, ok := aliasToCanonical[raw]; ok {
+		return canon
+	}
+	return raw
 }
 
 func (m *NetstatCollector) Init(config json.RawMessage) error {
@@ -77,6 +95,8 @@ func (m *NetstatCollector) Init(config json.RawMessage) error {
 		}
 	}
 
+	m.buildAliasMapping()
+
 	// Check access to net statistic file
 	file, err := os.Open(NETSTATFILE)
 	if err != nil {
@@ -97,18 +117,20 @@ func (m *NetstatCollector) Init(config json.RawMessage) error {
 		// Split line into fields
 		f := strings.Fields(l)
 
-		// Get net device entry
-		dev := strings.Trim(f[0], ": ")
+		// Get raw and canonical names
+		raw := strings.Trim(f[0], ": ")
+		canonical := getCanonicalName(raw, m.aliasToCanonical)
 
 		// Check if device is a included device
-		if _, ok := stringArrayContains(m.config.IncludeDevices, dev); ok {
-			tags := map[string]string{"stype": "network", "stype-id": dev, "type": "node"}
+		if _, ok := stringArrayContains(m.config.IncludeDevices, canonical); ok {
+			// Tag will contain original device name (raw).
+			tags := map[string]string{"stype": "network", "stype-id": raw, "type": "node"}
 			meta_unit_byte := map[string]string{"source": m.name, "group": "Network", "unit": "bytes"}
 			meta_unit_byte_per_sec := map[string]string{"source": m.name, "group": "Network", "unit": "bytes/sec"}
 			meta_unit_pkts := map[string]string{"source": m.name, "group": "Network", "unit": "packets"}
 			meta_unit_pkts_per_sec := map[string]string{"source": m.name, "group": "Network", "unit": "packets/sec"}
 
-			m.matches[dev] = []NetstatCollectorMetric{
+			m.matches[canonical] = []NetstatCollectorMetric{
 				{
 					name:       "net_bytes_in",
 					index:      fieldReceiveBytes,
@@ -143,7 +165,6 @@ func (m *NetstatCollector) Init(config json.RawMessage) error {
 				},
 			}
 		}
-
 	}
 
 	if len(m.matches) == 0 {
@@ -164,7 +185,7 @@ func (m *NetstatCollector) Read(interval time.Duration, output chan lp.CCMessage
 	// Save current timestamp
 	m.lastTimestamp = now
 
-	file, err := os.Open(string(NETSTATFILE))
+	file, err := os.Open(NETSTATFILE)
 	if err != nil {
 		cclog.ComponentError(m.name, err.Error())
 		return
@@ -183,11 +204,12 @@ func (m *NetstatCollector) Read(interval time.Duration, output chan lp.CCMessage
 		// Split line into fields
 		f := strings.Fields(l)
 
-		// Get net device entry
-		dev := strings.Trim(f[0], ":")
+		// Get raw and canonical names
+		raw := strings.Trim(f[0], ":")
+		canonical := getCanonicalName(raw, m.aliasToCanonical)
 
 		// Check if device is a included device
-		if devmetrics, ok := m.matches[dev]; ok {
+		if devmetrics, ok := m.matches[canonical]; ok {
 			for i := range devmetrics {
 				metric := &devmetrics[i]
 
diff --git a/collectors/netstatMetric.md b/collectors/netstatMetric.md
index 7ce0af9..fc5ee4d 100644
--- a/collectors/netstatMetric.md
+++ b/collectors/netstatMetric.md
@@ -4,14 +4,19 @@
 ```json
   "netstat": {
     "include_devices": [
-      "eth0"
+      "eth0",
+      "eno1"
     ],
-    "send_abs_values" : true,
-    "send_derived_values" : true
+    "send_abs_values": true,
+    "send_derived_values": true,
+    "interface_aliases": {
+      "eno1": ["eno1np0", "eno1_alt"],
+      "eth0": ["eth0_alias"]
+    }
   }
 ```
 
-The `netstat` collector reads data from `/proc/net/dev` and outputs a handful **node** metrics. With the `include_devices` list you can specify which network devices should be measured. **Note**: Most other collectors use an _exclude_ list instead of an include list.
+The `netstat` collector reads data from `/proc/net/dev` and outputs a handful of **node** metrics. With the `include_devices` list you can specify which network devices should be measured. **Note**: Most other collectors use an _exclude_ list instead of an include list. Optionally, you can define an `interface_aliases` mapping: for each canonical device (as listed in `include_devices`), you may provide an array of aliases that the system may report. When an alias is detected, it is mapped to its canonical name for matching, while the output tag `stype-id` always shows the actual system-reported name.
 
 Metrics:
 * `net_bytes_in` (`unit=bytes`)
@@ -23,5 +28,4 @@ Metrics:
 * `net_pkts_in_bw` (`unit=packets/sec` if `send_derived_values == true`)
 * `net_pkts_out_bw` (`unit=packets/sec` if `send_derived_values == true`)
 
-The device name is added as tag `stype=network,stype-id=<device>`.
-
+The device name is added as tag `stype=network,stype-id=<device>`.
\ No newline at end of file

From 87346e2eae2975b9a5e2d622f2ac4a19dcab83c5 Mon Sep 17 00:00:00 2001
From: brinkcoder <Robert.Externbrink@ruhr-uni-bochum.de>
Date: Wed, 19 Feb 2025 11:33:13 +0100
Subject: [PATCH 17/18] Fix excluded metrics for diskstat and add
 exclude_mounts (#131)

* Check creation of CCMessage in NATS receiver

* fix excluded metrics and add optional mountpoint exclude

---------

Co-authored-by: Thomas Roehl <thomas.roehl@fau.de>
Co-authored-by: exterr2f <Robert.Externbrink@rub.de>
Co-authored-by: Thomas Gruber <Thomas.Roehl@googlemail.com>
---
 collectors/diskstatMetric.go | 74 ++++++++++++++++++++++--------------
 collectors/diskstatMetric.md |  5 ++-
 2 files changed, 50 insertions(+), 29 deletions(-)

diff --git a/collectors/diskstatMetric.go b/collectors/diskstatMetric.go
index 0298362..92e53f3 100644
--- a/collectors/diskstatMetric.go
+++ b/collectors/diskstatMetric.go
@@ -9,22 +9,20 @@ import (
 	"time"
 
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
-	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
+	lp "github.com/ClusterCockpit/cc-lib/ccMessage"
 )
 
-//	"log"
-
 const MOUNTFILE = `/proc/self/mounts`
 
 type DiskstatCollectorConfig struct {
 	ExcludeMetrics []string `json:"exclude_metrics,omitempty"`
+	ExcludeMounts []string `json:"exclude_mounts,omitempty"`
 }
 
 type DiskstatCollector struct {
 	metricCollector
-	//matches map[string]int
-	config IOstatCollectorConfig
-	//devices map[string]IOstatCollectorEntry
+	config         DiskstatCollectorConfig
+	allowedMetrics map[string]bool
 }
 
 func (m *DiskstatCollector) Init(config json.RawMessage) error {
@@ -33,12 +31,21 @@ func (m *DiskstatCollector) Init(config json.RawMessage) error {
 	m.meta = map[string]string{"source": m.name, "group": "Disk"}
 	m.setup()
 	if len(config) > 0 {
-		err := json.Unmarshal(config, &m.config)
-		if err != nil {
+		if err := json.Unmarshal(config, &m.config); err != nil {
 			return err
 		}
 	}
-	file, err := os.Open(string(MOUNTFILE))
+	m.allowedMetrics = map[string]bool{
+		"disk_total":    true,
+		"disk_free":     true,
+		"part_max_used": true,
+	}
+	for _, excl := range m.config.ExcludeMetrics {
+		if _, ok := m.allowedMetrics[excl]; ok {
+			m.allowedMetrics[excl] = false
+		}
+	}
+	file, err := os.Open(MOUNTFILE)
 	if err != nil {
 		cclog.ComponentError(m.name, err.Error())
 		return err
@@ -53,7 +60,7 @@ func (m *DiskstatCollector) Read(interval time.Duration, output chan lp.CCMessag
 		return
 	}
 
-	file, err := os.Open(string(MOUNTFILE))
+	file, err := os.Open(MOUNTFILE)
 	if err != nil {
 		cclog.ComponentError(m.name, err.Error())
 		return
@@ -62,6 +69,7 @@ func (m *DiskstatCollector) Read(interval time.Duration, output chan lp.CCMessag
 
 	part_max_used := uint64(0)
 	scanner := bufio.NewScanner(file)
+mountLoop:
 	for scanner.Scan() {
 		line := scanner.Text()
 		if len(line) == 0 {
@@ -77,13 +85,17 @@ func (m *DiskstatCollector) Read(interval time.Duration, output chan lp.CCMessag
 		if strings.Contains(linefields[1], "boot") {
 			continue
 		}
-		path := strings.Replace(linefields[1], `\040`, " ", -1)
-		stat := syscall.Statfs_t{
-			Blocks: 0,
-			Bsize:  0,
-			Bfree:  0,
+
+		mountPath := strings.Replace(linefields[1], `\040`, " ", -1)
+
+		for _, excl := range m.config.ExcludeMounts {
+			if strings.Contains(mountPath, excl) {
+				continue mountLoop
+			}
 		}
-		err := syscall.Statfs(path, &stat)
+
+		stat := syscall.Statfs_t{}
+		err := syscall.Statfs(mountPath, &stat)
 		if err != nil {
 			continue
 		}
@@ -92,16 +104,20 @@ func (m *DiskstatCollector) Read(interval time.Duration, output chan lp.CCMessag
 		}
 		tags := map[string]string{"type": "node", "device": linefields[0]}
 		total := (stat.Blocks * uint64(stat.Bsize)) / uint64(1000000000)
-		y, err := lp.NewMessage("disk_total", tags, m.meta, map[string]interface{}{"value": total}, time.Now())
-		if err == nil {
-			y.AddMeta("unit", "GBytes")
-			output <- y
+		if m.allowedMetrics["disk_total"] {
+			y, err := lp.NewMessage("disk_total", tags, m.meta, map[string]interface{}{"value": total}, time.Now())
+			if err == nil {
+				y.AddMeta("unit", "GBytes")
+				output <- y
+			}
 		}
 		free := (stat.Bfree * uint64(stat.Bsize)) / uint64(1000000000)
-		y, err = lp.NewMessage("disk_free", tags, m.meta, map[string]interface{}{"value": free}, time.Now())
-		if err == nil {
-			y.AddMeta("unit", "GBytes")
-			output <- y
+		if m.allowedMetrics["disk_free"] {
+			y, err := lp.NewMessage("disk_free", tags, m.meta, map[string]interface{}{"value": free}, time.Now())
+			if err == nil {
+				y.AddMeta("unit", "GBytes")
+				output <- y
+			}
 		}
 		if total > 0 {
 			perc := (100 * (total - free)) / total
@@ -110,10 +126,12 @@ func (m *DiskstatCollector) Read(interval time.Duration, output chan lp.CCMessag
 			}
 		}
 	}
-	y, err := lp.NewMessage("part_max_used", map[string]string{"type": "node"}, m.meta, map[string]interface{}{"value": int(part_max_used)}, time.Now())
-	if err == nil {
-		y.AddMeta("unit", "percent")
-		output <- y
+	if m.allowedMetrics["part_max_used"] {
+		y, err := lp.NewMessage("part_max_used", map[string]string{"type": "node"}, m.meta, map[string]interface{}{"value": int(part_max_used)}, time.Now())
+		if err == nil {
+			y.AddMeta("unit", "percent")
+			output <- y
+		}
 	}
 }
 
diff --git a/collectors/diskstatMetric.md b/collectors/diskstatMetric.md
index a38f154..5a4b7a8 100644
--- a/collectors/diskstatMetric.md
+++ b/collectors/diskstatMetric.md
@@ -6,10 +6,13 @@
     "exclude_metrics": [
       "disk_total"
     ],
+    "exclude_mounts": [
+      "slurm-tmpfs"
+    ]
   }
 ```
 
-The `diskstat` collector reads data from `/proc/self/mounts` and outputs a handful **node** metrics. If a metric is not required, it can be excluded from forwarding it to the sink.
+The `diskstat` collector reads data from `/proc/self/mounts` and outputs a handful of **node** metrics. If a metric is not required, it can be excluded from being forwarded to the sink. Additionally, any mount point containing one of the strings specified in `exclude_mounts` will be skipped during metric collection.
 
 Metrics per device (with `device` tag):
 * `disk_total` (unit `GBytes`)
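
For clarity, here is a minimal, self-contained sketch of the substring-based mount exclusion that the hunk above introduces; the mount points and exclude patterns are hypothetical, while the real collector reads them from `/proc/self/mounts` and the `exclude_mounts` config:

```go
package main

import (
	"fmt"
	"strings"
)

func main() {
	// Hypothetical mount points and exclude patterns; the collector reads the
	// real list from /proc/self/mounts and exclude_mounts from its config.
	mounts := []string{"/", "/boot/efi", "/run/slurm-tmpfs/job123", "/data"}
	excludeMounts := []string{"slurm-tmpfs"}

mountLoop:
	for _, mnt := range mounts {
		for _, excl := range excludeMounts {
			if strings.Contains(mnt, excl) {
				// Any substring match skips the mount point entirely.
				continue mountLoop
			}
		}
		fmt.Println("collecting disk metrics for", mnt)
	}
}
```

The labelled `continue mountLoop` abandons the mount point as soon as any pattern matches, which is why the patch checks the exclusion before calling `syscall.Statfs`.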

From f9acc51a50b76938797517a3ce51502f90362648 Mon Sep 17 00:00:00 2001
From: brinkcoder <Robert.Externbrink@ruhr-uni-bochum.de>
Date: Wed, 19 Feb 2025 11:34:06 +0100
Subject: [PATCH 18/18] Add derived values for nfsiostat (#132)

* Check creation of CCMessage in NATS receiver

* add derived_values for nfsiostatMetric

---------

Co-authored-by: Thomas Roehl <thomas.roehl@fau.de>
Co-authored-by: exterr2f <Robert.Externbrink@rub.de>
Co-authored-by: Thomas Gruber <Thomas.Roehl@googlemail.com>
---
 collectors/nfsiostatMetric.go | 60 ++++++++++++++++++++++-------------
 collectors/nfsiostatMetric.md | 19 +++++++----
 2 files changed, 51 insertions(+), 28 deletions(-)

diff --git a/collectors/nfsiostatMetric.go b/collectors/nfsiostatMetric.go
index 09686e9..28491bb 100644
--- a/collectors/nfsiostatMetric.go
+++ b/collectors/nfsiostatMetric.go
@@ -10,7 +10,7 @@ import (
 	"time"
 
 	cclog "github.com/ClusterCockpit/cc-metric-collector/pkg/ccLogger"
-	lp "github.com/ClusterCockpit/cc-energy-manager/pkg/cc-message"
+	lp "github.com/ClusterCockpit/cc-lib/ccMessage"
 )
 
 // These are the fields we read from the JSON configuration
@@ -18,17 +18,20 @@ type NfsIOStatCollectorConfig struct {
 	ExcludeMetrics          []string `json:"exclude_metrics,omitempty"`
 	ExcludeFilesystem       []string `json:"exclude_filesystem,omitempty"`
 	UseServerAddressAsSType bool     `json:"use_server_as_stype,omitempty"`
+	SendAbsoluteValues      bool     `json:"send_abs_values"`
+	SendDerivedValues       bool     `json:"send_derived_values"`
 }
 
 // This contains all variables we need during execution and the variables
 // defined by metricCollector (name, init, ...)
 type NfsIOStatCollector struct {
 	metricCollector
-	config NfsIOStatCollectorConfig    // the configuration structure
-	meta   map[string]string           // default meta information
-	tags   map[string]string           // default tags
-	data   map[string]map[string]int64 // data storage for difference calculation
-	key    string                      // which device info should be used as subtype ID? 'server' or 'mntpoint', see NfsIOStatCollectorConfig.UseServerAddressAsSType
+	config        NfsIOStatCollectorConfig    // the configuration structure
+	meta          map[string]string           // default meta information
+	tags          map[string]string           // default tags
+	data          map[string]map[string]int64 // data storage for difference calculation
+	key           string                      // which device info should be used as subtype ID? 'server' or 'mntpoint'
+	lastTimestamp time.Time
 }
 
 var deviceRegex = regexp.MustCompile(`device (?P<server>[^ ]+) mounted on (?P<mntpoint>[^ ]+) with fstype nfs(?P<version>\d*) statvers=[\d\.]+`)
@@ -81,7 +84,6 @@ func (m *NfsIOStatCollector) readNfsiostats() map[string]map[string]int64 {
 							data[current[m.key]][name] = val
 						}
 					}
-
 				}
 				current = nil
 			}
@@ -98,6 +100,9 @@ func (m *NfsIOStatCollector) Init(config json.RawMessage) error {
 	m.meta = map[string]string{"source": m.name, "group": "NFS", "unit": "bytes"}
 	m.tags = map[string]string{"type": "node"}
 	m.config.UseServerAddressAsSType = false
+	// Set default configuration
+	m.config.SendAbsoluteValues = true
+	m.config.SendDerivedValues = false
 	if len(config) > 0 {
 		err = json.Unmarshal(config, &m.config)
 		if err != nil {
@@ -110,12 +115,15 @@ func (m *NfsIOStatCollector) Init(config json.RawMessage) error {
 		m.key = "server"
 	}
 	m.data = m.readNfsiostats()
+	m.lastTimestamp = time.Now()
 	m.init = true
 	return err
 }
 
 func (m *NfsIOStatCollector) Read(interval time.Duration, output chan lp.CCMessage) {
-	timestamp := time.Now()
+	now := time.Now()
+	timeDiff := now.Sub(m.lastTimestamp).Seconds()
+	m.lastTimestamp = now
 
 	// Get the current values for all mountpoints
 	newdata := m.readNfsiostats()
@@ -123,21 +131,30 @@ func (m *NfsIOStatCollector) Read(interval time.Duration, output chan lp.CCMessa
 	for mntpoint, values := range newdata {
 		// Was the mount point already present in the last iteration
 		if old, ok := m.data[mntpoint]; ok {
-			// Calculate the difference of old and new values
-			for i := range values {
-				x := values[i] - old[i]
-				y, err := lp.NewMessage(fmt.Sprintf("nfsio_%s", i), m.tags, m.meta, map[string]interface{}{"value": x}, timestamp)
-				if err == nil {
-					if strings.HasPrefix(i, "page") {
-						y.AddMeta("unit", "4K_Pages")
+			for name, newVal := range values {
+				if m.config.SendAbsoluteValues {
+					msg, err := lp.NewMessage(fmt.Sprintf("nfsio_%s", name), m.tags, m.meta, map[string]interface{}{"value": newVal}, now)
+					if err == nil {
+						msg.AddTag("stype", "filesystem")
+						msg.AddTag("stype-id", mntpoint)
+						output <- msg
 					}
-					y.AddTag("stype", "filesystem")
-					y.AddTag("stype-id", mntpoint)
-					// Send it to output channel
-					output <- y
 				}
-				// Update old to the new value for the next iteration
-				old[i] = values[i]
+				if m.config.SendDerivedValues {
+					rate := float64(newVal-old[name]) / timeDiff
+					msg, err := lp.NewMessage(fmt.Sprintf("nfsio_%s_bw", name), m.tags, m.meta, map[string]interface{}{"value": rate}, now)
+					if err == nil {
+						if strings.HasPrefix(name, "page") {
+							msg.AddMeta("unit", "4K_pages/s")
+						} else {
+							msg.AddMeta("unit", "bytes/sec")
+						}
+						msg.AddTag("stype", "filesystem")
+						msg.AddTag("stype-id", mntpoint)
+						output <- msg
+					}
+				}
+				old[name] = newVal
 			}
 		} else {
 			// First time we see this mount point, store all values
@@ -157,7 +174,6 @@ func (m *NfsIOStatCollector) Read(interval time.Duration, output chan lp.CCMessa
 			m.data[mntpoint] = nil
 		}
 	}
-
 }
 
 func (m *NfsIOStatCollector) Close() {
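
As a reference for the derived-value logic added above, here is a minimal sketch of the same rate calculation over elapsed wall-clock time; the counter readings are made up for illustration:

```go
package main

import (
	"fmt"
	"time"
)

func main() {
	// Made-up counter readings: oldVal was stored at lastTimestamp, newVal is
	// the current reading, mirroring the timeDiff bookkeeping in Read().
	oldVal := int64(1_000_000)
	lastTimestamp := time.Now().Add(-10 * time.Second)

	newVal := int64(1_500_000)
	now := time.Now()

	timeDiff := now.Sub(lastTimestamp).Seconds()
	rate := float64(newVal-oldVal) / timeDiff // bytes/sec for byte counters

	fmt.Printf("nfsio_nread_bw ~ %.0f bytes/sec\n", rate)
}
```

Dividing by the measured `timeDiff` rather than the nominal collection `interval` keeps the rate correct even when a read is delayed.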
diff --git a/collectors/nfsiostatMetric.md b/collectors/nfsiostatMetric.md
index 7f374e7..3f02b0c 100644
--- a/collectors/nfsiostatMetric.md
+++ b/collectors/nfsiostatMetric.md
@@ -3,16 +3,18 @@
 ```json
   "nfsiostat": {
     "exclude_metrics": [
-      "nfsio_oread"
+      "oread", "pageread"
     ],
-    "exclude_filesystems" : [
-        "/mnt",
+    "exclude_filesystems": [
+      "/mnt"
     ],
-    "use_server_as_stype": false
+    "use_server_as_stype": false,
+    "send_abs_values": false,
+    "send_derived_values": true
   }
 ```
 
-The `nfsiostat` collector reads data from `/proc/self/mountstats` and outputs a handful **node** metrics for each NFS filesystem. If a metric or filesystem is not required, it can be excluded from forwarding it to the sink.
+The `nfsiostat` collector reads data from `/proc/self/mountstats` and outputs a handful of **node** metrics for each NFS filesystem. If a metric or filesystem is not required, it can be excluded from being forwarded to the sink. **Note:** When excluding metrics, provide the base metric name (e.g. `pageread`) without the `nfsio_` prefix. The exclusion applies to both absolute and derived values.
 
 Metrics:
 * `nfsio_nread`: Bytes transferred by normal `read()` calls
@@ -24,4 +26,9 @@ Metrics:
 * `nfsio_nfsread`: Bytes transferred for reading from the server
 * `nfsio_nfswrite`: Pages transferred by writing to the server
 
-The `nfsiostat` collector adds the mountpoint to the tags as `stype=filesystem,stype-id=<mountpoint>`. If the server address should be used instead of the mountpoint, use the `use_server_as_stype` config setting.
\ No newline at end of file
+For each of these, if derived values are enabled, an additional metric with the `_bw` suffix is sent, representing the per-second rate:
+
+  * For normal byte metrics: `unit=bytes/sec`
+  * For page metrics: `unit=4K_pages/s`
+
+The `nfsiostat` collector adds the mountpoint to the tags as `stype=filesystem,stype-id=<mountpoint>`. If the server address should be used instead of the mountpoint, use the `use_server_as_stype` config setting.
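
Finally, a hedged sketch of how the documented base-name exclusion covers both the absolute and the `_bw` variants; the metric names and `excludeMetrics` slice here are illustrative, not the collector's actual implementation:

```go
package main

import (
	"fmt"
	"slices"
)

func main() {
	// Illustrative exclusion list using base names, as the documentation asks for.
	excludeMetrics := []string{"oread", "pageread"}

	for _, name := range []string{"nread", "oread", "pageread", "nfswrite"} {
		if slices.Contains(excludeMetrics, name) {
			continue // neither nfsio_<name> nor nfsio_<name>_bw is sent
		}
		fmt.Printf("would send nfsio_%s and nfsio_%s_bw\n", name, name)
	}
}
```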