Refactoring: Reduce byte size of structs.

Jan Eitzinger 2024-03-22 08:59:35 +01:00
parent 1e5f2944cf
commit 83c38e74db


@@ -32,32 +32,32 @@ type CCMetricStoreConfig struct {
 }
 
 type CCMetricStore struct {
-	here2there    map[string]string
-	there2here    map[string]string
-	client        http.Client
 	jwt           string
 	url           string
 	queryEndpoint string
+	client        http.Client
+	here2there    map[string]string
+	there2here    map[string]string
 }
 
 type ApiQueryRequest struct {
 	Cluster     string     `json:"cluster"`
-	Queries     []ApiQuery `json:"queries"`
-	ForAllNodes []string   `json:"for-all-nodes"`
 	From        int64      `json:"from"`
 	To          int64      `json:"to"`
 	WithStats   bool       `json:"with-stats"`
 	WithData    bool       `json:"with-data"`
+	Queries     []ApiQuery `json:"queries"`
+	ForAllNodes []string   `json:"for-all-nodes"`
 }
 
 type ApiQuery struct {
-	Type       *string  `json:"type,omitempty"`
-	SubType    *string  `json:"subtype,omitempty"`
 	Metric     string   `json:"metric"`
 	Hostname   string   `json:"host"`
-	Aggregate  bool     `json:"aggreg"`
+	Type       *string  `json:"type,omitempty"`
 	TypeIds    []string `json:"type-ids,omitempty"`
+	SubType    *string  `json:"subtype,omitempty"`
 	SubTypeIds []string `json:"subtype-ids,omitempty"`
+	Aggregate  bool     `json:"aggreg"`
 }
 
 type ApiQueryResponse struct {
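
Background for the reordering above: the Go compiler pads each field out to its type's alignment, so a small field sandwiched between larger ones can cost extra bytes, and changing the declaration order changes the in-memory layout without changing how encoding/json matches the json tags. The following standalone sketch is illustrative only, with toy struct names that are not part of this commit; it shows how to check the effect with unsafe.Sizeof, which can equally be applied to the reordered structs here (e.g. unsafe.Sizeof(ApiQuery{})).

package main

import (
	"fmt"
	"unsafe"
)

// padded: a 1-byte bool before an 8-byte int64 forces 7 bytes of padding,
// and the trailing bool forces 7 more so the size stays a multiple of 8.
type padded struct {
	a bool
	b int64
	c bool
}

// packed: same fields, largest first; the two bools share one 8-byte slot.
type packed struct {
	b int64
	a bool
	c bool
}

func main() {
	fmt.Println(unsafe.Sizeof(padded{})) // 24 on 64-bit platforms
	fmt.Println(unsafe.Sizeof(packed{})) // 16 on 64-bit platforms
}

One way to find such candidates is the fieldalignment analyzer in golang.org/x/tools (go/analysis/passes/fieldalignment); the commit itself does not say which tool, if any, produced the new ordering.
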
@@ -67,16 +67,15 @@ type ApiQueryResponse struct {
 type ApiMetricData struct {
 	Error *string        `json:"error"`
-	Data  []schema.Float `json:"data"`
 	From  int64          `json:"from"`
 	To    int64          `json:"to"`
+	Data  []schema.Float `json:"data"`
 	Avg   schema.Float   `json:"avg"`
 	Min   schema.Float   `json:"min"`
 	Max   schema.Float   `json:"max"`
 }
 
 func (ccms *CCMetricStore) Init(rawConfig json.RawMessage) error {
 	var config CCMetricStoreConfig
 	if err := json.Unmarshal(rawConfig, &config); err != nil {
 		log.Warn("Error while unmarshaling raw json config")
@@ -122,8 +121,8 @@ func (ccms *CCMetricStore) toLocalName(metric string) string {
 func (ccms *CCMetricStore) doRequest(
 	ctx context.Context,
-	body *ApiQueryRequest) (*ApiQueryResponse, error) {
+	body *ApiQueryRequest,
+) (*ApiQueryResponse, error) {
 	buf := &bytes.Buffer{}
 	if err := json.NewEncoder(buf).Encode(body); err != nil {
 		log.Warn("Error while encoding request body")
@@ -162,8 +161,8 @@ func (ccms *CCMetricStore) LoadData(
 	job *schema.Job,
 	metrics []string,
 	scopes []schema.MetricScope,
-	ctx context.Context) (schema.JobData, error) {
+	ctx context.Context,
+) (schema.JobData, error) {
 	queries, assignedScope, err := ccms.buildQueries(job, metrics, scopes)
 	if err != nil {
 		log.Warn("Error while building queries")
@@ -186,7 +185,7 @@ func (ccms *CCMetricStore) LoadData(
 	}
 
 	var errors []string
-	var jobData schema.JobData = make(schema.JobData)
+	jobData := make(schema.JobData)
 	for i, row := range resBody.Results {
 		query := req.Queries[i]
 		metric := ccms.toLocalName(query.Metric)
@@ -267,8 +266,8 @@ var (
 func (ccms *CCMetricStore) buildQueries(
 	job *schema.Job,
 	metrics []string,
-	scopes []schema.MetricScope) ([]ApiQuery, []schema.MetricScope, error) {
+	scopes []schema.MetricScope,
+) ([]ApiQuery, []schema.MetricScope, error) {
 	queries := make([]ApiQuery, 0, len(metrics)*len(scopes)*len(job.Resources))
 	assignedScope := []schema.MetricScope{}
@@ -504,8 +503,8 @@ func (ccms *CCMetricStore) buildQueries(
 func (ccms *CCMetricStore) LoadStats(
 	job *schema.Job,
 	metrics []string,
-	ctx context.Context) (map[string]map[string]schema.MetricStatistics, error) {
+	ctx context.Context,
+) (map[string]map[string]schema.MetricStatistics, error) {
 	queries, _, err := ccms.buildQueries(job, metrics, []schema.MetricScope{schema.MetricScopeNode}) // #166 Add scope shere for analysis view accelerator normalization?
 	if err != nil {
 		log.Warn("Error while building query")
@@ -566,8 +565,8 @@ func (ccms *CCMetricStore) LoadNodeData(
 	metrics, nodes []string,
 	scopes []schema.MetricScope,
 	from, to time.Time,
-	ctx context.Context) (map[string]map[string][]*schema.JobMetric, error) {
+	ctx context.Context,
+) (map[string]map[string][]*schema.JobMetric, error) {
 	req := ApiQueryRequest{
 		Cluster: cluster,
 		From:    from.Unix(),
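
The signature hunks above all make the same purely syntactic change: once a parameter list spans several lines, the last parameter keeps a trailing comma and the closing parenthesis plus the result list move to a line of their own, which separates the parameters visually from the body and keeps a later parameter addition to a one-line diff. A minimal sketch of that style, using hypothetical names not taken from this commit:

package main

import (
	"context"
	"fmt"
)

// loadSomething is a hypothetical function showing the multi-line signature
// style used in the hunks above: trailing comma after the last parameter,
// ")" and the result list on their own line.
func loadSomething(
	ctx context.Context,
	name string,
) (string, error) {
	_ = ctx // placeholder; a real implementation would honor cancellation
	return "data for " + name, nil
}

func main() {
	v, err := loadSomething(context.Background(), "node01")
	fmt.Println(v, err)
}
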
@@ -652,7 +651,6 @@ func (ccms *CCMetricStore) LoadNodeData(
 }
 
 func intToStringSlice(is []int) []string {
 	ss := make([]string, len(is))
 	for i, x := range is {
 		ss[i] = strconv.Itoa(x)