Mirror of https://github.com/ClusterCockpit/cc-backend, synced 2025-07-22 20:41:40 +02:00
Refactor taggers. Refine Job Hooks. Start job classifier
@@ -59,14 +59,13 @@ func getDirectory(
 func getPath(
 	job *schema.Job,
 	rootPath string,
-	file string) string {
-
+	file string,
+) string {
 	return filepath.Join(
 		getDirectory(job, rootPath), file)
 }
 
 func loadJobMeta(filename string) (*schema.JobMeta, error) {
-
 	b, err := os.ReadFile(filename)
 	if err != nil {
 		log.Errorf("loadJobMeta() > open file error: %v", err)
@@ -83,7 +82,6 @@ func loadJobMeta(filename string) (*schema.JobMeta, error) {
 
 func loadJobData(filename string, isCompressed bool) (schema.JobData, error) {
 	f, err := os.Open(filename)
-
 	if err != nil {
 		log.Errorf("fsBackend LoadJobData()- %v", err)
 		return nil, err
@@ -117,7 +115,6 @@ func loadJobData(filename string, isCompressed bool) (schema.JobData, error) {
 
 func loadJobStats(filename string, isCompressed bool) (schema.ScopedJobStats, error) {
 	f, err := os.Open(filename)
-
 	if err != nil {
 		log.Errorf("fsBackend LoadJobStats()- %v", err)
 		return nil, err
@@ -150,7 +147,6 @@ func loadJobStats(filename string, isCompressed bool) (schema.ScopedJobStats, er
 }
 
 func (fsa *FsArchive) Init(rawConfig json.RawMessage) (uint64, error) {
-
 	var config FsArchiveConfig
 	if err := json.Unmarshal(rawConfig, &config); err != nil {
 		log.Warnf("Init() > Unmarshal error: %#v", err)
@@ -276,7 +272,6 @@ func (fsa *FsArchive) Exists(job *schema.Job) bool {
 }
 
 func (fsa *FsArchive) Clean(before int64, after int64) {
-
 	if after == 0 {
 		after = math.MaxInt64
 	}
@@ -392,7 +387,6 @@ func (fsa *FsArchive) Compress(jobs []*schema.Job) {
 }
 
 func (fsa *FsArchive) CompressLast(starttime int64) int64 {
-
 	filename := filepath.Join(fsa.path, "compress.txt")
 	b, err := os.ReadFile(filename)
 	if err != nil {
@@ -441,7 +435,6 @@ func (fsa *FsArchive) LoadJobMeta(job *schema.Job) (*schema.JobMeta, error) {
 }
 
 func (fsa *FsArchive) LoadClusterCfg(name string) (*schema.Cluster, error) {
-
 	b, err := os.ReadFile(filepath.Join(fsa.path, name, "cluster.json"))
 	if err != nil {
 		log.Errorf("LoadClusterCfg() > open file error: %v", err)
@@ -456,7 +449,6 @@ func (fsa *FsArchive) LoadClusterCfg(name string) (*schema.Cluster, error) {
 }
 
 func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
-
 	ch := make(chan JobContainer)
 	go func() {
 		clustersDir, err := os.ReadDir(fsa.path)
@@ -527,7 +519,6 @@ func (fsa *FsArchive) Iter(loadMetricData bool) <-chan JobContainer {
 }
 
 func (fsa *FsArchive) StoreJobMeta(jobMeta *schema.JobMeta) error {
-
 	job := schema.Job{
 		BaseJob:   jobMeta.BaseJob,
 		StartTime: time.Unix(jobMeta.StartTime, 0),
@@ -556,8 +547,8 @@ func (fsa *FsArchive) GetClusters() []string {
 
 func (fsa *FsArchive) ImportJob(
 	jobMeta *schema.JobMeta,
-	jobData *schema.JobData) error {
-
+	jobData *schema.JobData,
+) error {
 	job := schema.Job{
 		BaseJob:   jobMeta.BaseJob,
 		StartTime: time.Unix(jobMeta.StartTime, 0),
@@ -583,28 +574,6 @@ func (fsa *FsArchive) ImportJob(
 		return err
 	}
 
-	// var isCompressed bool = true
-	// // TODO Use shortJob Config for check
-	// if jobMeta.Duration < 300 {
-	// 	isCompressed = false
-	// 	f, err = os.Create(path.Join(dir, "data.json"))
-	// } else {
-	// 	f, err = os.Create(path.Join(dir, "data.json.gz"))
-	// }
-	// if err != nil {
-	// 	return err
-	// }
-	//
-	// if isCompressed {
-	// 	if err := EncodeJobData(gzip.NewWriter(f), jobData); err != nil {
-	// 		return err
-	// 	}
-	// } else {
-	// 	if err := EncodeJobData(f, jobData); err != nil {
-	// 		return err
-	// 	}
-	// }
-
 	f, err = os.Create(path.Join(dir, "data.json"))
 	if err != nil {
 		log.Error("Error while creating filepath for data.json")
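Beyond the signature and blank-line cleanups, the largest change above is dropping the commented-out block in ImportJob that would have written short jobs as plain data.json and longer jobs as gzip-compressed data.json.gz. The following standalone Go sketch restates that removed logic for reference only; writeJobData, the encode parameter, and the plain int duration are hypothetical stand-ins (the real code would use EncodeJobData and jobMeta.Duration), and the 300-second cutoff comes from the removed "TODO Use shortJob Config for check" comment.

package sketch

// Sketch of the compression decision described by the comment block removed
// from ImportJob in this commit. Not part of cc-backend; names here are
// illustrative stand-ins.

import (
	"compress/gzip"
	"io"
	"os"
	"path"
)

// writeJobData (hypothetical) writes job data either uncompressed or gzipped,
// depending on the job duration. encode stands in for EncodeJobData.
func writeJobData(dir string, durationSec int, encode func(io.Writer) error) error {
	// Short jobs stay uncompressed; 300 s is the hard-coded threshold from the
	// removed comments (the TODO suggested making it configurable).
	if durationSec < 300 {
		f, err := os.Create(path.Join(dir, "data.json"))
		if err != nil {
			return err
		}
		defer f.Close()
		return encode(f)
	}

	// Longer jobs are written through a gzip writer into data.json.gz.
	f, err := os.Create(path.Join(dir, "data.json.gz"))
	if err != nil {
		return err
	}
	defer f.Close()

	zw := gzip.NewWriter(f)
	if err := encode(zw); err != nil {
		zw.Close()
		return err
	}
	return zw.Close() // flush the gzip stream before the deferred file close
}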