split statsTable data from jobMetrics query, initial commit

- mainly backend changes
- statsTable changes only for prototyping
This commit is contained in:
Christoph Kluge
2025-03-13 17:33:55 +01:00
parent d0af933b35
commit f5f36427a4
19 changed files with 1471 additions and 426 deletions

File diff suppressed because it is too large Load Diff

View File

@@ -81,11 +81,6 @@ type JobLinkResultList struct {
Count *int `json:"count,omitempty"`
}
type JobMetricStatWithName struct {
Name string `json:"name"`
Stats *schema.MetricStatistics `json:"stats"`
}
type JobMetricWithName struct {
Name string `json:"name"`
Scope schema.MetricScope `json:"scope"`
@@ -100,6 +95,17 @@ type JobResultList struct {
HasNextPage *bool `json:"hasNextPage,omitempty"`
}
// JobStats pairs one metric name with that metric's job-wide
// statistics, as returned by the jobStats GraphQL query.
type JobStats struct {
	// Name is the metric name the statistics belong to.
	Name string `json:"name"`
	// Stats holds the aggregated statistics over the whole job —
	// presumably min/avg/max; see schema.MetricStatistics (TODO confirm).
	Stats *schema.MetricStatistics `json:"stats"`
}
// JobStatsWithScope carries the statistics of one metric of a job at one
// specific scope, as returned by the scopedJobStats GraphQL query.
type JobStatsWithScope struct {
	// Name is the metric name the statistics belong to.
	Name string `json:"name"`
	// Scope is the resolution the statistics were computed at
	// (see schema.MetricScope for the possible values).
	Scope schema.MetricScope `json:"scope"`
	// Stats has one entry per host (and, where applicable, per
	// sub-resource id within a host).
	Stats []*ScopedStats `json:"stats"`
}
type JobsStatistics struct {
ID string `json:"id"`
Name string `json:"name"`
@@ -173,6 +179,12 @@ type PageRequest struct {
Page int `json:"page"`
}
// ScopedStats holds the statistics of one metric for a single host and,
// optionally, a single sub-resource within that host.
type ScopedStats struct {
	// Hostname identifies the node the statistics were measured on.
	Hostname string `json:"hostname"`
	// ID identifies a sub-resource of the host — NOTE(review): presumably
	// nil at node scope and set for finer scopes; confirm against the
	// metric data source.
	ID *string `json:"id,omitempty"`
	// Data holds the statistics for this host/resource.
	Data *schema.MetricStatistics `json:"data"`
}
type StringInput struct {
Eq *string `json:"eq,omitempty"`
Neq *string `json:"neq,omitempty"`

View File

@@ -301,24 +301,23 @@ func (r *queryResolver) JobMetrics(ctx context.Context, id string, metrics []str
return res, err
}
// JobMetricStats is the resolver for the jobMetricStats field.
func (r *queryResolver) JobMetricStats(ctx context.Context, id string, metrics []string) ([]*model.JobMetricStatWithName, error) {
// JobMetricStats is the resolver for the jobStats field.
func (r *queryResolver) JobStats(ctx context.Context, id string, metrics []string) ([]*model.JobStats, error) {
job, err := r.Query().Job(ctx, id)
if err != nil {
log.Warn("Error while querying job for metrics")
log.Warnf("Error while querying job %s for metrics", id)
return nil, err
}
data, err := metricDataDispatcher.LoadStatData(job, metrics, ctx)
data, err := metricDataDispatcher.LoadJobStats(job, metrics, ctx)
if err != nil {
log.Warn("Error while loading job stat data")
log.Warnf("Error while loading job stat data for job id %s", id)
return nil, err
}
res := []*model.JobMetricStatWithName{}
res := []*model.JobStats{}
for name, md := range data {
res = append(res, &model.JobMetricStatWithName{
res = append(res, &model.JobStats{
Name: name,
Stats: &md,
})
@@ -327,6 +326,47 @@ func (r *queryResolver) JobMetricStats(ctx context.Context, id string, metrics [
return res, err
}
// ScopedJobStats is the resolver for the scopedJobStats field. It loads
// the job with the given id and returns, for every requested metric and
// every requested scope, the per-host (and per-sub-resource) statistics.
//
// Fixes: the original doc comment named the wrong function ("JobStats"),
// which breaks the gqlgen "Name is the resolver for ..." convention;
// leftover commented-out debug log lines removed; mdlStats pre-sized.
func (r *queryResolver) ScopedJobStats(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope) ([]*model.JobStatsWithScope, error) {
	job, err := r.Query().Job(ctx, id)
	if err != nil {
		log.Warnf("Error while querying job %s for metrics", id)
		return nil, err
	}

	data, err := metricDataDispatcher.LoadScopedJobStats(job, metrics, scopes, ctx)
	if err != nil {
		log.Warnf("Error while loading scoped job stat data for job id %s", id)
		return nil, err
	}

	res := make([]*model.JobStatsWithScope, 0)
	for name, scoped := range data {
		for scope, stats := range scoped {
			// Convert the dispatcher's per-host stats into the
			// GraphQL model type.
			mdlStats := make([]*model.ScopedStats, 0, len(stats))
			for _, stat := range stats {
				mdlStats = append(mdlStats, &model.ScopedStats{
					Hostname: stat.Hostname,
					ID:       stat.Id,
					Data:     stat.Data,
				})
			}

			res = append(res, &model.JobStatsWithScope{
				Name:  name,
				Scope: scope,
				Stats: mdlStats,
			})
		}
	}

	return res, nil
}
// JobsFootprints is the resolver for the jobsFootprints field.
func (r *queryResolver) JobsFootprints(ctx context.Context, filter []*model.JobFilter, metrics []string) (*model.Footprints, error) {
// NOTE: Legacy Naming! This resolver is for normalized histograms in analysis view only - *Not* related to DB "footprint" column!