Refactor schema

* Remove redundant scope field
* Change Series Id type to string
* Adapt Go structs to the JSON schema
Jan Eitzinger
2023-03-22 19:21:11 +01:00
parent def35a551a
commit 1f8c6064e2
15 changed files with 247 additions and 342 deletions

View File

@@ -16,9 +16,9 @@ type Topology struct {
     Node         []int          `json:"node"`
     Socket       [][]int        `json:"socket"`
     MemoryDomain [][]int        `json:"memoryDomain"`
-    Die          [][]int        `json:"die"`
+    Die          [][]*int       `json:"die,omitempty"`
     Core         [][]int        `json:"core"`
-    Accelerators []*Accelerator `json:"accelerators"`
+    Accelerators []*Accelerator `json:"accelerators,omitempty"`
 }

 type MetricValue struct {
@@ -27,17 +27,16 @@ type MetricValue struct {
 }

 type SubCluster struct {
-    Name            string       `json:"name"`
-    Nodes           string       `json:"nodes"`
-    NumberOfNodes   int          `json:"numberOfNodes"`
-    ProcessorType   string       `json:"processorType"`
-    SocketsPerNode  int          `json:"socketsPerNode"`
-    CoresPerSocket  int          `json:"coresPerSocket"`
-    ThreadsPerCore  int          `json:"threadsPerCore"`
-    FlopRateScalar  *MetricValue `json:"flopRateScalar"`
-    FlopRateSimd    *MetricValue `json:"flopRateSimd"`
-    MemoryBandwidth *MetricValue `json:"memoryBandwidth"`
-    Topology        *Topology    `json:"topology"`
+    Name            string      `json:"name"`
+    Nodes           string      `json:"nodes"`
+    ProcessorType   string      `json:"processorType"`
+    SocketsPerNode  int         `json:"socketsPerNode"`
+    CoresPerSocket  int         `json:"coresPerSocket"`
+    ThreadsPerCore  int         `json:"threadsPerCore"`
+    FlopRateScalar  MetricValue `json:"flopRateScalar"`
+    FlopRateSimd    MetricValue `json:"flopRateSimd"`
+    MemoryBandwidth MetricValue `json:"memoryBandwidth"`
+    Topology        Topology    `json:"topology"`
 }

 type SubClusterConfig struct {
@@ -53,13 +52,13 @@ type MetricConfig struct {
     Name        string              `json:"name"`
     Unit        Unit                `json:"unit"`
     Scope       MetricScope         `json:"scope"`
-    Aggregation *string             `json:"aggregation"`
+    Aggregation string              `json:"aggregation"`
     Timestep    int                 `json:"timestep"`
-    Peak        *float64            `json:"peak"`
-    Normal      *float64            `json:"normal"`
-    Caution     *float64            `json:"caution"`
-    Alert       *float64            `json:"alert"`
-    SubClusters []*SubClusterConfig `json:"subClusters"`
+    Peak        float64             `json:"peak"`
+    Normal      float64             `json:"normal"`
+    Caution     float64             `json:"caution"`
+    Alert       float64             `json:"alert"`
+    SubClusters []*SubClusterConfig `json:"subClusters,omitempty"`
 }

 type Cluster struct {
@@ -169,12 +168,3 @@ func (topo *Topology) GetAcceleratorIDs() ([]int, error) {
     }
     return accels, nil
 }
-
-func (topo *Topology) GetAcceleratorIndex(id string) (int, bool) {
-    for idx, accel := range topo.Accelerators {
-        if accel.ID == id {
-            return idx, true
-        }
-    }
-    return -1, false
-}
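
Note: the pointer-to-value change in SubCluster and MetricConfig shifts missing-key handling: a metric value absent from cluster.json now decodes to a usable zero value instead of nil, and the new omitempty tags drop empty optional fields from the output. A minimal, self-contained sketch of that behaviour, using trimmed local copies of the structs above (not the package's full field set):

package main

import (
    "encoding/json"
    "fmt"
)

// Trimmed local copies of the refactored schema structs, for illustration only.
type MetricValue struct {
    Unit  string  `json:"unit"`
    Value float64 `json:"value"`
}

type Topology struct {
    Node []int   `json:"node"`
    Die  [][]int `json:"die,omitempty"`
}

type SubCluster struct {
    Name           string      `json:"name"`
    FlopRateScalar MetricValue `json:"flopRateScalar"` // value type: a missing key decodes to the zero value
    Topology       Topology    `json:"topology"`
}

func main() {
    // "flopRateScalar" and "die" are absent from the input document.
    in := []byte(`{"name":"main","topology":{"node":[0,1,2,3]}}`)

    var sc SubCluster
    if err := json.Unmarshal(in, &sc); err != nil {
        panic(err)
    }
    fmt.Println(sc.FlopRateScalar.Value) // 0 -- no nil check needed anymore

    // On re-encoding, the empty "die" slice is dropped via omitempty, while
    // flopRateScalar is always emitted (as a zero object) since it is a value type.
    out, _ := json.Marshal(sc)
    fmt.Println(string(out))
}

Callers that previously nil-checked the pointer fields would instead compare against the zero value, which is presumably the intent of removing the pointers.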

View File

@@ -80,18 +80,17 @@ func (s *Series) MarshalJSON() ([]byte, error) {
     buf = append(buf, s.Hostname...)
     buf = append(buf, '"')
     if s.Id != nil {
-        buf = append(buf, `,"id":`...)
-        buf = strconv.AppendInt(buf, int64(*s.Id), 10)
-    }
-    if s.Statistics != nil {
-        buf = append(buf, `,"statistics":{"min":`...)
-        buf = strconv.AppendFloat(buf, s.Statistics.Min, 'f', 2, 64)
-        buf = append(buf, `,"avg":`...)
-        buf = strconv.AppendFloat(buf, s.Statistics.Avg, 'f', 2, 64)
-        buf = append(buf, `,"max":`...)
-        buf = strconv.AppendFloat(buf, s.Statistics.Max, 'f', 2, 64)
-        buf = append(buf, '}')
+        buf = append(buf, `,"id":"`...)
+        buf = append(buf, *s.Id...)
+        buf = append(buf, '"')
     }
+    buf = append(buf, `,"statistics":{"min":`...)
+    buf = strconv.AppendFloat(buf, s.Statistics.Min, 'f', 2, 64)
+    buf = append(buf, `,"avg":`...)
+    buf = strconv.AppendFloat(buf, s.Statistics.Avg, 'f', 2, 64)
+    buf = append(buf, `,"max":`...)
+    buf = strconv.AppendFloat(buf, s.Statistics.Max, 'f', 2, 64)
+    buf = append(buf, '}')
     buf = append(buf, `,"data":[`...)
     for i := 0; i < len(s.Data); i++ {
         if i != 0 {
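
Since Id is now a *string and Statistics a plain value, the custom marshaler quotes the id and always emits the statistics object. A standalone sketch of the same append-based encoding, with a trimmed local Series (Data as []float64 here rather than the package's Float type):

package main

import (
    "fmt"
    "strconv"
)

type MetricStatistics struct{ Min, Avg, Max float64 }

// Local, trimmed copy of the Series shape after the refactor.
type Series struct {
    Hostname   string
    Id         *string
    Statistics MetricStatistics
    Data       []float64
}

// marshal mirrors the append-based encoding above: the id is emitted as a
// JSON string when set, statistics are always present, and floats are
// formatted with 'f' and two decimals.
func marshal(s Series) []byte {
    buf := make([]byte, 0, 128)
    buf = append(buf, `{"hostname":"`...)
    buf = append(buf, s.Hostname...)
    buf = append(buf, '"')
    if s.Id != nil {
        buf = append(buf, `,"id":"`...)
        buf = append(buf, *s.Id...)
        buf = append(buf, '"')
    }
    buf = append(buf, `,"statistics":{"min":`...)
    buf = strconv.AppendFloat(buf, s.Statistics.Min, 'f', 2, 64)
    buf = append(buf, `,"avg":`...)
    buf = strconv.AppendFloat(buf, s.Statistics.Avg, 'f', 2, 64)
    buf = append(buf, `,"max":`...)
    buf = strconv.AppendFloat(buf, s.Statistics.Max, 'f', 2, 64)
    buf = append(buf, `},"data":[`...)
    for i, v := range s.Data {
        if i != 0 {
            buf = append(buf, ',')
        }
        buf = strconv.AppendFloat(buf, v, 'f', 2, 64)
    }
    buf = append(buf, `]}`...)
    return buf
}

func main() {
    id := "0"
    s := Series{
        Hostname:   "node001",
        Id:         &id,
        Statistics: MetricStatistics{Min: 0.1, Avg: 0.5, Max: 0.9},
        Data:       []float64{0.1, 0.5, 0.9},
    }
    fmt.Println(string(marshal(s)))
    // {"hostname":"node001","id":"0","statistics":{"min":0.10,"avg":0.50,"max":0.90},"data":[0.10,0.50,0.90]}
}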

View File

@@ -90,8 +90,8 @@ var JobDefaults BaseJob = BaseJob{
 }

 type Unit struct {
-    Base   string `json:"base"`
-    Prefix string `json:"prefix"`
+    Base   string  `json:"base"`
+    Prefix *string `json:"prefix,omitempty"`
 }

 // JobStatistics model
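
Making Prefix a *string with omitempty means a unit without a prefix serializes without the key at all, while a set prefix round-trips as before; the pointer presumably also lets readers distinguish an unset prefix from an empty string. A small sketch with a local copy of the struct:

package main

import (
    "encoding/json"
    "fmt"
)

// Local copy of the refactored Unit, for illustration.
type Unit struct {
    Base   string  `json:"base"`
    Prefix *string `json:"prefix,omitempty"`
}

func main() {
    // Without a prefix the key disappears entirely from the JSON.
    b, _ := json.Marshal(Unit{Base: "B"})
    fmt.Println(string(b)) // {"base":"B"}

    // With a prefix set through the pointer it is emitted as usual.
    p := "G"
    b, _ = json.Marshal(Unit{Base: "B", Prefix: &p})
    fmt.Println(string(b)) // {"base":"B","prefix":"G"}
}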

View File

@@ -16,17 +16,16 @@ type JobData map[string]map[MetricScope]*JobMetric
 type JobMetric struct {
     Unit             Unit         `json:"unit"`
-    Scope            MetricScope  `json:"scope"`
     Timestep         int          `json:"timestep"`
     Series           []Series     `json:"series"`
-    StatisticsSeries *StatsSeries `json:"statisticsSeries"`
+    StatisticsSeries *StatsSeries `json:"statisticsSeries,omitempty"`
 }

 type Series struct {
-    Hostname   string            `json:"hostname"`
-    Id         *int              `json:"id,omitempty"`
-    Statistics *MetricStatistics `json:"statistics"`
-    Data       []Float           `json:"data"`
+    Hostname   string           `json:"hostname"`
+    Id         *string          `json:"id,omitempty"`
+    Statistics MetricStatistics `json:"statistics"`
+    Data       []Float          `json:"data"`
 }

 type MetricStatistics struct {
@@ -218,17 +217,12 @@ func (jd *JobData) AddNodeScope(metric string) bool {
     nodeJm := &JobMetric{
         Unit:     jm.Unit,
-        Scope:    MetricScopeNode,
         Timestep: jm.Timestep,
         Series:   make([]Series, 0, len(hosts)),
     }

     for hostname, series := range hosts {
         min, sum, max := math.MaxFloat32, 0.0, -math.MaxFloat32
         for _, series := range series {
-            if series.Statistics == nil {
-                min, sum, max = math.NaN(), math.NaN(), math.NaN()
-                break
-            }
             sum += series.Statistics.Avg
             min = math.Min(min, series.Statistics.Min)
             max = math.Max(max, series.Statistics.Max)
@@ -259,7 +253,7 @@ func (jd *JobData) AddNodeScope(metric string) bool {
         nodeJm.Series = append(nodeJm.Series, Series{
             Hostname:   hostname,
-            Statistics: &MetricStatistics{Min: min, Avg: sum / float64(len(series)), Max: max},
+            Statistics: MetricStatistics{Min: min, Avg: sum / float64(len(series)), Max: max},
             Data:       data,
         })
     }
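
The aggregation itself is unchanged: per-host series are collapsed into one node-level series by taking the minimum of the minima, the maximum of the maxima, and the mean of the averages; only the nil check on Statistics disappears because it is now a value. A condensed sketch of that reduction, with the surrounding JobData plumbing stripped away (assumes a non-empty input slice):

package main

import (
    "fmt"
    "math"
)

type MetricStatistics struct{ Min, Avg, Max float64 }

// reduce collapses the per-core/per-socket statistics of one host into a
// single node-level triple, following the same min/avg/max rule as above.
func reduce(series []MetricStatistics) MetricStatistics {
    min, sum, max := math.MaxFloat32, 0.0, -math.MaxFloat32
    for _, s := range series {
        sum += s.Avg
        min = math.Min(min, s.Min)
        max = math.Max(max, s.Max)
    }
    return MetricStatistics{Min: min, Avg: sum / float64(len(series)), Max: max}
}

func main() {
    cores := []MetricStatistics{
        {Min: 1.0, Avg: 2.0, Max: 3.0},
        {Min: 0.5, Avg: 4.0, Max: 6.0},
    }
    fmt.Printf("%+v\n", reduce(cores)) // {Min:0.5 Avg:3 Max:6}
}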

View File

@@ -94,6 +94,7 @@
"timestep",
"aggregation",
"peak",
"normal",
"caution",
"alert"
]

View File

@@ -146,7 +146,8 @@ func ConvertUnitString(us string) schema.Unit {
     u := NewUnit(us)
     p := u.getPrefix()
     if p.Prefix() != "" {
-        nu.Prefix = p.Prefix()
+        prefix := p.Prefix()
+        nu.Prefix = &prefix
     }
     m := u.getMeasure()
     d := u.getUnitDenominator()
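
The two-line replacement is the usual Go pattern for filling an optional pointer field: the address of a call result cannot be taken, so the prefix is copied into a local variable first. A small sketch of the pattern; getPrefix here is a hypothetical stand-in for the converter's prefix lookup:

package main

import "fmt"

type Unit struct {
    Base   string
    Prefix *string
}

// getPrefix is a hypothetical helper used only for this illustration.
func getPrefix(us string) string {
    if len(us) > 1 && us[0] == 'G' {
        return "G"
    }
    return ""
}

func main() {
    nu := Unit{Base: "B"}
    // &getPrefix(us) would not compile: Go cannot take the address of a call
    // result, so the value is copied into a local first, as in the diff above.
    if p := getPrefix("GB"); p != "" {
        nu.Prefix = &p
    }
    fmt.Println(nu.Base, *nu.Prefix) // B G
}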