Refactor variable naming and doc comments

2025-09-27 09:27:36 +02:00
parent 50d000e7e2
commit 4fc78bc382
23 changed files with 112 additions and 267 deletions

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
var configSchema = `

View File

@@ -2,6 +2,8 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
// Package archive implements the job archive interface and various backend implementations
package archive
import (
@@ -108,7 +110,6 @@ func GetHandle() ArchiveBackend {
return ar
}
// Helper to metricdataloader.LoadAverages().
func LoadAveragesFromArchive(
job *schema.Job,
metrics []string,
@@ -131,7 +132,6 @@ func LoadAveragesFromArchive(
return nil
}
// Helper to metricdataloader.LoadJobStats().
func LoadStatsFromArchive(
job *schema.Job,
metrics []string,
@@ -160,7 +160,6 @@ func LoadStatsFromArchive(
return data, nil
}
// Helper to metricdataloader.LoadScopedJobStats().
func LoadScopedStatsFromArchive(
job *schema.Job,
metrics []string,
@@ -185,7 +184,7 @@ func GetStatistics(job *schema.Job) (map[string]schema.JobStatistics, error) {
return metaFile.Statistics, nil
}
// If the job is archived, find its `meta.json` file and override the Metadata
// UpdateMetadata checks if the job is archived, finds its `meta.json` file and overrides the Metadata
// in that JSON file. If the job is not archived, nothing is done.
func UpdateMetadata(job *schema.Job, metadata map[string]string) error {
mutex.Lock()
@@ -206,7 +205,7 @@ func UpdateMetadata(job *schema.Job, metadata map[string]string) error {
return ar.StoreJobMeta(jobMeta)
}
// If the job is archived, find its `meta.json` file and override the tags list
// UpdateTags checks if the job is archived, finds its `meta.json` file and overrides the tags list
// in that JSON file. If the job is not archived, nothing is done.
func UpdateTags(job *schema.Job, tags []*schema.Tag) error {
mutex.Lock()
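
For context, the reworded doc comments describe the archive-update path: both helpers rewrite `meta.json` only once a job is archived and do nothing for running jobs. A minimal caller sketch, assuming the archive backend has already been initialized and that the import paths and the `job.Tags` field match the repository layout (names and values below are illustrative, not taken from this diff):

package main // placement is illustrative; real callers live inside cc-backend

import (
	"github.com/ClusterCockpit/cc-backend/pkg/archive"
	"github.com/ClusterCockpit/cc-lib/schema"
)

// finalizeJob sketches how a caller might refresh the archived copy of a job.
func finalizeJob(job *schema.Job) error {
	// Merge extra metadata into meta.json; this is a no-op while the job is not archived.
	if err := archive.UpdateMetadata(job, map[string]string{
		"note": "example metadata entry", // hypothetical key/value
	}); err != nil {
		return err
	}

	// Persist the current tag list into meta.json the same way.
	return archive.UpdateTags(job, job.Tags)
}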

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
import (

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
import (
@@ -336,7 +337,7 @@ func (fsa *FsArchive) Move(jobs []*schema.Job, path string) {
source := getDirectory(job, fsa.path)
target := getDirectory(job, path)
if err := os.MkdirAll(filepath.Clean(filepath.Join(target, "..")), 0777); err != nil {
if err := os.MkdirAll(filepath.Clean(filepath.Join(target, "..")), 0o777); err != nil {
cclog.Errorf("JobArchive Move MkDir error: %v", err)
}
if err := os.Rename(source, target); err != nil {
@@ -395,7 +396,7 @@ func (fsa *FsArchive) CompressLast(starttime int64) int64 {
b, err := os.ReadFile(filename)
if err != nil {
cclog.Errorf("fsBackend Compress - %v", err)
os.WriteFile(filename, []byte(fmt.Sprintf("%d", starttime)), 0644)
os.WriteFile(filename, fmt.Appendf(nil, "%d", starttime), 0o644)
return starttime
}
last, err := strconv.ParseInt(strings.TrimSuffix(string(b), "\n"), 10, 64)
@@ -405,12 +406,12 @@ func (fsa *FsArchive) CompressLast(starttime int64) int64 {
}
cclog.Infof("fsBackend Compress - start %d last %d", starttime, last)
os.WriteFile(filename, []byte(fmt.Sprintf("%d", starttime)), 0644)
os.WriteFile(filename, fmt.Appendf(nil, "%d", starttime), 0o644)
return last
}
func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
var isCompressed bool = true
isCompressed := true
filename := getPath(job, fsa.path, "data.json.gz")
if !util.CheckFileExists(filename) {
@@ -422,7 +423,7 @@ func (fsa *FsArchive) LoadJobData(job *schema.Job) (schema.JobData, error) {
}
func (fsa *FsArchive) LoadJobStats(job *schema.Job) (schema.ScopedJobStats, error) {
var isCompressed bool = true
isCompressed := true
filename := getPath(job, fsa.path, "data.json.gz")
if !util.CheckFileExists(filename) {
@@ -495,7 +496,7 @@ func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
}
if loadMetricData {
var isCompressed bool = true
isCompressed := true
filename := filepath.Join(dirpath, startTimeDir.Name(), "data.json.gz")
if !util.CheckFileExists(filename) {
@@ -549,7 +550,7 @@ func (fsa *FsArchive) ImportJob(
jobData *schema.JobData,
) error {
dir := getPath(jobMeta, fsa.path, "")
if err := os.MkdirAll(dir, 0777); err != nil {
if err := os.MkdirAll(dir, 0o777); err != nil {
cclog.Error("Error while creating job archive path")
return err
}
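
This file's changes replace `[]byte(fmt.Sprintf(...))` with `fmt.Appendf(nil, ...)` (available since Go 1.19), which formats straight into a byte slice without an intermediate string, switch permission bits to the explicit `0o` octal prefix, and shorten `var isCompressed bool = true` to `isCompressed := true`. A standalone sketch of the Appendf/Sprintf equivalence, with an illustrative file name and timestamp:

package main

import (
	"fmt"
	"os"
)

func main() {
	starttime := int64(1695800000) // example value only

	// Old form: format into a string, then copy it into a []byte.
	oldBytes := []byte(fmt.Sprintf("%d", starttime))

	// New form: append the formatted text to a nil slice, no intermediate string.
	newBytes := fmt.Appendf(nil, "%d", starttime)

	fmt.Println(string(oldBytes) == string(newBytes)) // true

	// 0o644 is the same permission value as 0644, just with the explicit octal prefix.
	_ = os.WriteFile("compress.txt", newBytes, 0o644) // path is illustrative
}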

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
import (
@@ -14,7 +15,7 @@ import (
)
func DecodeJobData(r io.Reader, k string) (schema.JobData, error) {
data := cache.Get(k, func() (value interface{}, ttl time.Duration, size int) {
data := cache.Get(k, func() (value any, ttl time.Duration, size int) {
var d schema.JobData
if err := json.NewDecoder(r).Decode(&d); err != nil {
cclog.Warn("Error while decoding raw job data json")
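
Since Go 1.18, `any` is a built-in alias for `interface{}`, so changing the cache callback's return type is purely cosmetic; the callback still returns the decoded value, a TTL and a size. A small compiler-checked sketch of the alias, independent of cc-backend:

package main

import "fmt"

// any is an alias, not a distinct type, so these two signatures are interchangeable.
func oldStyle() (value interface{}, size int) { return "payload", 7 }
func newStyle() (value any, size int)         { return "payload", 7 }

func main() {
	v1, _ := oldStyle()
	v2, _ := newStyle()
	fmt.Println(v1 == v2) // true: both interface values hold the same string
}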

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
import (
@@ -79,8 +80,8 @@ type NLExprString string
func (nle NLExprString) consume(input string) (next string, ok bool) {
str := string(nle)
if strings.HasPrefix(input, str) {
return strings.TrimPrefix(input, str), true
if after, ok0 := strings.CutPrefix(input, str); ok0 {
return after, true
}
return "", false
}
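
`strings.CutPrefix` (added in Go 1.20) folds the `HasPrefix` check and the `TrimPrefix` call into one operation, so the prefix is matched only once. A standalone sketch of the same consume pattern used by the node-list parser (literal values are illustrative):

package main

import (
	"fmt"
	"strings"
)

// consume strips a fixed prefix from the input if present, mirroring NLExprString.consume.
func consume(input, prefix string) (string, bool) {
	if after, ok := strings.CutPrefix(input, prefix); ok {
		return after, true
	}
	return "", false
}

func main() {
	fmt.Println(consume("node[01-04]", "node")) // [01-04] true
	fmt.Println(consume("host01", "node"))      // prefix absent: returns "", false
}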

View File

@@ -2,6 +2,7 @@
// All rights reserved. This file is part of cc-backend.
// Use of this source code is governed by a MIT-style
// license that can be found in the LICENSE file.
package archive
type S3ArchiveConfig struct {