add single update gql queries to metric wrapper

Christoph Kluge 2024-08-16 14:50:31 +02:00
parent 6ab2e02fe6
commit b1fd07cd30
6 changed files with 114 additions and 38 deletions

View File

@@ -223,7 +223,7 @@ type Query {
   allocatedNodes(cluster: String!): [Count!]!
   job(id: ID!): Job
-  jobMetrics(id: ID!, metrics: [String!], scopes: [MetricScope!]): [JobMetricWithName!]!
+  jobMetrics(id: ID!, metrics: [String!], scopes: [MetricScope!], resolution: Int): [JobMetricWithName!]!
   jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints
   jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
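
For reference, a client query exercising the new optional resolution argument could look like the sketch below, shown as a Go string constant (the constant name and the field selection are illustrative only; the selection mirrors the sub-query added to the metric wrapper further down). Leaving resolution out keeps the previous behaviour, because the resolver substitutes a default.

package example

// exampleJobMetricsQuery is an illustrative query document, not part of this
// commit. Omitting the resolution argument lets the resolver pick its default.
const exampleJobMetricsQuery = `
query ($id: ID!, $metrics: [String!], $scopes: [MetricScope!]) {
  jobMetrics(id: $id, metrics: $metrics, scopes: $scopes, resolution: 240) {
    name
    scope
    metric {
      timestep
      series {
        hostname
        data
      }
    }
  }
}`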

View File

@@ -1110,7 +1110,7 @@ func (api *RestApi) getJobMetrics(rw http.ResponseWriter, r *http.Request) {
   }
   resolver := graph.GetResolverInstance()
-  data, err := resolver.Query().JobMetrics(r.Context(), id, metrics, scopes)
+  data, err := resolver.Query().JobMetrics(r.Context(), id, metrics, scopes, nil)
   if err != nil {
     json.NewEncoder(rw).Encode(Respone{
       Error: &struct {

View File

@@ -246,7 +246,7 @@ type ComplexityRoot struct {
     Clusters func(childComplexity int) int
     GlobalMetrics func(childComplexity int) int
     Job func(childComplexity int, id string) int
-    JobMetrics func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope) int
+    JobMetrics func(childComplexity int, id string, metrics []string, scopes []schema.MetricScope, resolution *int) int
     Jobs func(childComplexity int, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) int
     JobsFootprints func(childComplexity int, filter []*model.JobFilter, metrics []string) int
     JobsStatistics func(childComplexity int, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) int
@@ -368,7 +368,7 @@ type QueryResolver interface {
   User(ctx context.Context, username string) (*model.User, error)
   AllocatedNodes(ctx context.Context, cluster string) ([]*model.Count, error)
   Job(ctx context.Context, id string) (*schema.Job, error)
-  JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope) ([]*model.JobMetricWithName, error)
+  JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope, resolution *int) ([]*model.JobMetricWithName, error)
   JobsFootprints(ctx context.Context, filter []*model.JobFilter, metrics []string) (*model.Footprints, error)
   Jobs(ctx context.Context, filter []*model.JobFilter, page *model.PageRequest, order *model.OrderByInput) (*model.JobResultList, error)
   JobsStatistics(ctx context.Context, filter []*model.JobFilter, metrics []string, page *model.PageRequest, sortBy *model.SortByAggregate, groupBy *model.Aggregate) ([]*model.JobsStatistics, error)
@@ -1290,7 +1290,7 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in
       return 0, false
     }

-    return e.complexity.Query.JobMetrics(childComplexity, args["id"].(string), args["metrics"].([]string), args["scopes"].([]schema.MetricScope)), true
+    return e.complexity.Query.JobMetrics(childComplexity, args["id"].(string), args["metrics"].([]string), args["scopes"].([]schema.MetricScope), args["resolution"].(*int)), true

   case "Query.jobs":
     if e.complexity.Query.Jobs == nil {
@@ -2059,7 +2059,7 @@ type Query {
   allocatedNodes(cluster: String!): [Count!]!
   job(id: ID!): Job
-  jobMetrics(id: ID!, metrics: [String!], scopes: [MetricScope!]): [JobMetricWithName!]!
+  jobMetrics(id: ID!, metrics: [String!], scopes: [MetricScope!], resolution: Int): [JobMetricWithName!]!
   jobsFootprints(filter: [JobFilter!], metrics: [String!]!): Footprints
   jobs(filter: [JobFilter!], page: PageRequest, order: OrderByInput): JobResultList!
@@ -2370,6 +2370,15 @@ func (ec *executionContext) field_Query_jobMetrics_args(ctx context.Context, raw
     }
   }
   args["scopes"] = arg2
+  var arg3 *int
+  if tmp, ok := rawArgs["resolution"]; ok {
+    ctx := graphql.WithPathContext(ctx, graphql.NewPathWithField("resolution"))
+    arg3, err = ec.unmarshalOInt2ᚖint(ctx, tmp)
+    if err != nil {
+      return nil, err
+    }
+  }
+  args["resolution"] = arg3
   return args, nil
 }
@@ -8499,7 +8508,7 @@ func (ec *executionContext) _Query_jobMetrics(ctx context.Context, field graphql
   }()
   resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (interface{}, error) {
     ctx = rctx // use context from middleware stack in children
-    return ec.resolvers.Query().JobMetrics(rctx, fc.Args["id"].(string), fc.Args["metrics"].([]string), fc.Args["scopes"].([]schema.MetricScope))
+    return ec.resolvers.Query().JobMetrics(rctx, fc.Args["id"].(string), fc.Args["metrics"].([]string), fc.Args["scopes"].([]schema.MetricScope), fc.Args["resolution"].(*int))
   })
   if err != nil {
     ec.Error(ctx, err)

View File

@@ -224,13 +224,19 @@ func (r *queryResolver) Job(ctx context.Context, id string) (*schema.Job, error)
 }

 // JobMetrics is the resolver for the jobMetrics field.
-func (r *queryResolver) JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope) ([]*model.JobMetricWithName, error) {
+func (r *queryResolver) JobMetrics(ctx context.Context, id string, metrics []string, scopes []schema.MetricScope, resolution *int) ([]*model.JobMetricWithName, error) {
+  defaultRes := 600
+  if resolution == nil {
+    resolution = &defaultRes
+  }
+
   job, err := r.Query().Job(ctx, id)
   if err != nil {
     log.Warn("Error while querying job for metrics")
     return nil, err
   }

+  log.Debugf(">>>>> REQUEST DATA HERE FOR %v AT SCOPE %v WITH RESOLUTION OF %d", metrics, scopes, *resolution)
   data, err := metricdata.LoadData(job, metrics, scopes, ctx)
   if err != nil {
     log.Warn("Error while loading job data")
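
Because gqlgen maps the nullable resolution: Int argument to a *int, callers either pass nil, as the REST handler above now does, to fall back to the 600-sample default, or pass a pointer to a concrete value. A minimal standalone sketch of that fallback pattern (the helper name is illustrative, not part of the codebase):

package main

import "fmt"

// effectiveResolution mirrors the nil-check in the resolver above:
// a nil pointer means no resolution was requested, so the default applies.
func effectiveResolution(resolution *int) int {
    defaultRes := 600
    if resolution == nil {
        return defaultRes
    }
    return *resolution
}

func main() {
    fmt.Println(effectiveResolution(nil)) // 600, e.g. the REST endpoint passing nil

    res := 240
    fmt.Println(effectiveResolution(&res)) // 240, e.g. an explicit GraphQL argument
}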

View File

@@ -122,14 +122,14 @@
     variables: { dbid, selectedMetrics, selectedScopes },
   });

-  function loadAllScopes() {
-    selectedScopes = [...selectedScopes, "socket", "core"]
-    jobMetrics = queryStore({
-      client: client,
-      query: query,
-      variables: { dbid, selectedMetrics, selectedScopes},
-    });
-  }
+  // function loadAllScopes() {
+  //   selectedScopes = [...selectedScopes, "socket", "core"]
+  //   jobMetrics = queryStore({
+  //     client: client,
+  //     query: query,
+  //     variables: { dbid, selectedMetrics, selectedScopes},
+  //   });
+  // }

   // Handle Job Query on Init -> is not executed anymore
   getContext("on-init")(() => {
@@ -229,11 +229,6 @@
         $initq.data.job.subCluster,
       ),
     }));
-
-  const loadRes = ({ detail }) => {
-    console.log(">>> UPPER RES REQUEST", detail)
-  }
-
 </script>

 <Row>
@@ -362,8 +357,6 @@
       {#if item.data}
         <Metric
           bind:this={plots[item.metric]}
-          on:load-all={loadAllScopes}
-          on:new-res={loadRes}
           job={$initq.data.job}
           metricName={item.metric}
           metricUnit={$initq.data.globalMetrics.find((gm) => gm.name == item.metric)?.unit}

View File

@@ -13,7 +13,12 @@
 -->

 <script>
-  import { createEventDispatcher } from "svelte";
+  import {
+    queryStore,
+    gql,
+    getContextClient
+  } from "@urql/svelte";
+  // import { createEventDispatcher } from "svelte";
   import {
     InputGroup,
     InputGroupText,
@@ -32,24 +37,79 @@
   export let rawData;
   export let isShared = false;

-  const dispatch = createEventDispatcher();
-  const unit = (metricUnit?.prefix ? metricUnit.prefix : "") + (metricUnit?.base ? metricUnit.base : "")

   let selectedHost = null,
     plot,
-    fetching = false,
     error = null;
   let selectedScope = minScope(scopes);
-  let selectedResolution = 60
-  $: dispatch("new-res", selectedResolution)
+  let selectedResolution = 600
   let statsPattern = /(.*)-stat$/
   let statsSeries = rawData.map((data) => data?.statisticsSeries ? data.statisticsSeries : null)
   let selectedScopeIndex

-  const resolutions = [60, 240, 600]
+  // const dispatch = createEventDispatcher();
+  const unit = (metricUnit?.prefix ? metricUnit.prefix : "") + (metricUnit?.base ? metricUnit.base : "")
+  const resolutions = [600, 240, 60]
+  const client = getContextClient();
+  const subQuery = gql`
+    query ($dbid: ID!, $selectedMetrics: [String!]!, $selectedScopes: [MetricScope!]!, $selectedResolution: Int) {
+      singleUpdate: jobMetrics(id: $dbid, metrics: $selectedMetrics, scopes: $selectedScopes, resolution: $selectedResolution) {
+        name
+        scope
+        metric {
+          unit {
+            prefix
+            base
+          }
+          timestep
+          statisticsSeries {
+            min
+            median
+            max
+          }
+          series {
+            hostname
+            id
+            data
+            statistics {
+              min
+              avg
+              max
+            }
+          }
+        }
+      }
+    }
+  `;
+
+  let metricData;
+  let selectedScopes = [...scopes]
+  const dbid = job.id;
+  const selectedMetrics = [metricName]
+
+  function loadUpdate() {
+    // useQuery('repoData', () =>
+    //   fetch('https://api.github.com/repos/SvelteStack/svelte-query').then(res =>
+    //     res.json()
+    //   )
+    metricData = queryStore({
+      client: client,
+      query: subQuery,
+      variables: { dbid, selectedMetrics, selectedScopes, selectedResolution },
+    });
+    console.log('S> OLD DATA:', rawData)
+    // rawData = {...$metricData?.data?.singleUpdate}
+  };
+
+  $: if (selectedScope == "load-all") {
+    scopes = [...scopes, "socket", "core"]
+    selectedScope = nativeScope
+    selectedScopes = [...scopes]
+    loadUpdate()
+  };
+
-  $: availableScopes = scopes;
   $: patternMatches = statsPattern.exec(selectedScope)
   $: if (!patternMatches) {
     selectedScopeIndex = scopes.findIndex((s) => s == selectedScope);
@@ -61,7 +121,10 @@
     (series) => selectedHost == null || series.hostname == selectedHost,
   );

-  $: if (selectedScope == "load-all") dispatch("load-all");
+  $: if ($metricData && !$metricData.fetching) console.log('S> NEW DATA:', rawData)
+  // $: console.log('Pattern', patternMatches)
+  $: console.log('SelectedScope', selectedScope)
+  $: console.log('ScopeIndex', selectedScopeIndex)
 </script>

 <InputGroup>
@@ -69,13 +132,13 @@
     {metricName} ({unit})
   </InputGroupText>
   <select class="form-select" bind:value={selectedScope}>
-    {#each availableScopes as scope, index}
+    {#each scopes as scope, index}
       <option value={scope}>{scope}</option>
       {#if statsSeries[index]}
        <option value={scope + '-stat'}>stats series ({scope})</option>
      {/if}
    {/each}
-    {#if availableScopes.length == 1 && nativeScope != "node"}
+    {#if scopes.length == 1 && nativeScope != "node"}
      <option value={"load-all"}>Load all...</option>
    {/if}
  </select>
@@ -87,14 +150,19 @@
       {/each}
     </select>
   {/if}
-  <select class="form-select" bind:value={selectedResolution}>
+  <select class="form-select" bind:value={selectedResolution} on:change={() => {
+    scopes = ["node"]
+    selectedScope = "node"
+    selectedScopes = [...scopes]
+    loadUpdate
+  }}>
     {#each resolutions as res}
       <option value={res}>Timestep: {res}</option>
     {/each}
   </select>
 </InputGroup>

 {#key series}
-  {#if fetching == true}
+  {#if $metricData?.fetching == true}
     <Spinner />
   {:else if error != null}
     <Card body color="danger">{error.message}</Card>