Mirror of https://github.com/ClusterCockpit/cc-backend (synced 2025-05-13 20:51:43 +02:00)
add resource compare graph, add cursor sync, handle jobIds filter
parent d3d752f90c
commit aed2bd48fc
@@ -174,6 +174,9 @@ type JobStats {
  jobId: Int!
  startTime: Int!
  duration: Int!
  numNodes: Int!
  numHWThreads: Int
  numAccelerators: Int
  stats: [NamedStats!]!
}
@@ -171,10 +171,13 @@ type ComplexityRoot struct {
	}

	JobStats struct {
		Duration  func(childComplexity int) int
		JobID     func(childComplexity int) int
		StartTime func(childComplexity int) int
		Stats     func(childComplexity int) int
		Duration        func(childComplexity int) int
		JobID           func(childComplexity int) int
		NumAccelerators func(childComplexity int) int
		NumHWThreads    func(childComplexity int) int
		NumNodes        func(childComplexity int) int
		StartTime       func(childComplexity int) int
		Stats           func(childComplexity int) int
	}

	JobsStatistics struct {
@@ -956,6 +959,27 @@ func (e *executableSchema) Complexity(typeName, field string, childComplexity in

		return e.complexity.JobStats.JobID(childComplexity), true

	case "JobStats.numAccelerators":
		if e.complexity.JobStats.NumAccelerators == nil {
			break
		}

		return e.complexity.JobStats.NumAccelerators(childComplexity), true

	case "JobStats.numHWThreads":
		if e.complexity.JobStats.NumHWThreads == nil {
			break
		}

		return e.complexity.JobStats.NumHWThreads(childComplexity), true

	case "JobStats.numNodes":
		if e.complexity.JobStats.NumNodes == nil {
			break
		}

		return e.complexity.JobStats.NumNodes(childComplexity), true

	case "JobStats.startTime":
		if e.complexity.JobStats.StartTime == nil {
			break
@@ -2299,6 +2323,9 @@ type JobStats {
  jobId: Int!
  startTime: Int!
  duration: Int!
  numNodes: Int!
  numHWThreads: Int
  numAccelerators: Int
  stats: [NamedStats!]!
}
@@ -7506,6 +7533,132 @@ func (ec *executionContext) fieldContext_JobStats_duration(_ context.Context, fi
	return fc, nil
}

func (ec *executionContext) _JobStats_numNodes(ctx context.Context, field graphql.CollectedField, obj *model.JobStats) (ret graphql.Marshaler) {
	fc, err := ec.fieldContext_JobStats_numNodes(ctx, field)
	if err != nil {
		return graphql.Null
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.NumNodes, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		if !graphql.HasFieldError(ctx, fc) {
			ec.Errorf(ctx, "must not be null")
		}
		return graphql.Null
	}
	res := resTmp.(int)
	fc.Result = res
	return ec.marshalNInt2int(ctx, field.Selections, res)
}

func (ec *executionContext) fieldContext_JobStats_numNodes(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
	fc = &graphql.FieldContext{
		Object:     "JobStats",
		Field:      field,
		IsMethod:   false,
		IsResolver: false,
		Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
			return nil, errors.New("field of type Int does not have child fields")
		},
	}
	return fc, nil
}

func (ec *executionContext) _JobStats_numHWThreads(ctx context.Context, field graphql.CollectedField, obj *model.JobStats) (ret graphql.Marshaler) {
	fc, err := ec.fieldContext_JobStats_numHWThreads(ctx, field)
	if err != nil {
		return graphql.Null
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.NumHWThreads, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*int)
	fc.Result = res
	return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
}

func (ec *executionContext) fieldContext_JobStats_numHWThreads(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
	fc = &graphql.FieldContext{
		Object:     "JobStats",
		Field:      field,
		IsMethod:   false,
		IsResolver: false,
		Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
			return nil, errors.New("field of type Int does not have child fields")
		},
	}
	return fc, nil
}

func (ec *executionContext) _JobStats_numAccelerators(ctx context.Context, field graphql.CollectedField, obj *model.JobStats) (ret graphql.Marshaler) {
	fc, err := ec.fieldContext_JobStats_numAccelerators(ctx, field)
	if err != nil {
		return graphql.Null
	}
	ctx = graphql.WithFieldContext(ctx, fc)
	defer func() {
		if r := recover(); r != nil {
			ec.Error(ctx, ec.Recover(ctx, r))
			ret = graphql.Null
		}
	}()
	resTmp, err := ec.ResolverMiddleware(ctx, func(rctx context.Context) (any, error) {
		ctx = rctx // use context from middleware stack in children
		return obj.NumAccelerators, nil
	})
	if err != nil {
		ec.Error(ctx, err)
		return graphql.Null
	}
	if resTmp == nil {
		return graphql.Null
	}
	res := resTmp.(*int)
	fc.Result = res
	return ec.marshalOInt2ᚖint(ctx, field.Selections, res)
}

func (ec *executionContext) fieldContext_JobStats_numAccelerators(_ context.Context, field graphql.CollectedField) (fc *graphql.FieldContext, err error) {
	fc = &graphql.FieldContext{
		Object:     "JobStats",
		Field:      field,
		IsMethod:   false,
		IsResolver: false,
		Child: func(ctx context.Context, field graphql.CollectedField) (*graphql.FieldContext, error) {
			return nil, errors.New("field of type Int does not have child fields")
		},
	}
	return fc, nil
}

func (ec *executionContext) _JobStats_stats(ctx context.Context, field graphql.CollectedField, obj *model.JobStats) (ret graphql.Marshaler) {
	fc, err := ec.fieldContext_JobStats_stats(ctx, field)
	if err != nil {
@@ -11307,6 +11460,12 @@ func (ec *executionContext) fieldContext_Query_jobsMetricStats(ctx context.Conte
				return ec.fieldContext_JobStats_startTime(ctx, field)
			case "duration":
				return ec.fieldContext_JobStats_duration(ctx, field)
			case "numNodes":
				return ec.fieldContext_JobStats_numNodes(ctx, field)
			case "numHWThreads":
				return ec.fieldContext_JobStats_numHWThreads(ctx, field)
			case "numAccelerators":
				return ec.fieldContext_JobStats_numAccelerators(ctx, field)
			case "stats":
				return ec.fieldContext_JobStats_stats(ctx, field)
			}
@@ -17647,6 +17806,15 @@ func (ec *executionContext) _JobStats(ctx context.Context, sel ast.SelectionSet,
			if out.Values[i] == graphql.Null {
				out.Invalids++
			}
		case "numNodes":
			out.Values[i] = ec._JobStats_numNodes(ctx, field, obj)
			if out.Values[i] == graphql.Null {
				out.Invalids++
			}
		case "numHWThreads":
			out.Values[i] = ec._JobStats_numHWThreads(ctx, field, obj)
		case "numAccelerators":
			out.Values[i] = ec._JobStats_numAccelerators(ctx, field, obj)
		case "stats":
			out.Values[i] = ec._JobStats_stats(ctx, field, obj)
			if out.Values[i] == graphql.Null {
@@ -97,10 +97,13 @@ type JobResultList struct {
}

type JobStats struct {
	JobID     int           `json:"jobId"`
	StartTime int           `json:"startTime"`
	Duration  int           `json:"duration"`
	Stats     []*NamedStats `json:"stats"`
	JobID           int           `json:"jobId"`
	StartTime       int           `json:"startTime"`
	Duration        int           `json:"duration"`
	NumNodes        int           `json:"numNodes"`
	NumHWThreads    *int          `json:"numHWThreads,omitempty"`
	NumAccelerators *int          `json:"numAccelerators,omitempty"`
	Stats           []*NamedStats `json:"stats"`
}

type JobsStatistics struct {
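Aside (illustrative, not part of the commit): the nullable schema fields come through as *int with omitempty, so an unset count is dropped from the JSON payload rather than serialized as 0. A minimal sketch of that behavior, using a hypothetical local struct that mirrors the generated model:

    package main

    import (
        "encoding/json"
        "fmt"
    )

    // jobStats mirrors the relevant shape of the generated model.JobStats (illustrative only).
    type jobStats struct {
        JobID        int  `json:"jobId"`
        NumNodes     int  `json:"numNodes"`
        NumHWThreads *int `json:"numHWThreads,omitempty"` // nil pointer => key omitted entirely
    }

    func main() {
        threads := 64
        a, _ := json.Marshal(jobStats{JobID: 101, NumNodes: 2, NumHWThreads: &threads})
        b, _ := json.Marshal(jobStats{JobID: 102, NumNodes: 1})
        fmt.Println(string(a)) // {"jobId":101,"numNodes":2,"numHWThreads":64}
        fmt.Println(string(b)) // {"jobId":102,"numNodes":1}
    }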
@@ -615,11 +615,16 @@ func (r *queryResolver) JobsMetricStats(ctx context.Context, filter []*model.Job
			})
		}

		numThreadsInt := int(job.NumHWThreads)
		numAccsInt := int(job.NumAcc)
		res = append(res, &model.JobStats{
			JobID:     int(job.JobID),
			StartTime: int(job.StartTime.Unix()),
			Duration:  int(job.Duration),
			Stats:     sres,
			JobID:           int(job.JobID),
			StartTime:       int(job.StartTime.Unix()),
			Duration:        int(job.Duration),
			NumNodes:        int(job.NumNodes),
			NumHWThreads:    &numThreadsInt,
			NumAccelerators: &numAccsInt,
			Stats:           sres,
		})
	}
	return res, err
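Aside (illustrative, not part of the commit): the resolver needs the numThreadsInt / numAccsInt locals because Go cannot take the address of a conversion expression such as &int(job.NumHWThreads); the value must first land in an addressable variable. A minimal sketch of the pattern, with a hypothetical int32 field standing in for the job record:

    package main

    import "fmt"

    // job stands in for the database job record; the real field types live elsewhere in the repo.
    type job struct {
        NumHWThreads int32
    }

    func main() {
        j := job{NumHWThreads: 128}

        // numThreads := &int(j.NumHWThreads) // does not compile: cannot take the address of a conversion
        numThreadsInt := int(j.NumHWThreads) // convert into an addressable local first
        numThreads := &numThreadsInt

        fmt.Println(*numThreads) // 128
    }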
@@ -149,6 +149,14 @@ func BuildWhereClause(filter *model.JobFilter, query sq.SelectBuilder) sq.Select
	if filter.JobID != nil {
		query = buildStringCondition("job.job_id", filter.JobID, query)
	}
	if filter.JobIds != nil {
		jobIds := make([]string, len(filter.JobIds))
		for i, val := range filter.JobIds {
			jobIds[i] = string(val)
		}

		query = query.Where(sq.Eq{"job.job_id": jobIds})
	}
	if filter.ArrayJobID != nil {
		query = query.Where("job.array_job_id = ?", *filter.ArrayJobID)
	}
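Aside (illustrative, not part of the commit): passing a slice to squirrel's sq.Eq is what turns the jobIds filter into a single SQL IN condition. A minimal sketch, assuming github.com/Masterminds/squirrel and made-up job IDs:

    package main

    import (
        "fmt"

        sq "github.com/Masterminds/squirrel"
    )

    func main() {
        jobIds := []string{"4711", "4712", "4713"} // hypothetical IDs

        // sq.Eq with a slice value renders as "col IN (?,?,...)" with one placeholder per element.
        sql, args, err := sq.Select("job.id").
            From("job").
            Where(sq.Eq{"job.job_id": jobIds}).
            ToSql()
        if err != nil {
            panic(err)
        }

        fmt.Println(sql)  // SELECT job.id FROM job WHERE job.job_id IN (?,?,?)
        fmt.Println(args) // [4711 4712 4713]
    }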
@@ -15,6 +15,7 @@

<script>
  import { getContext } from "svelte";
  import uPlot from "uplot";
  import {
    queryStore,
    gql,
@@ -40,11 +41,12 @@
  let filter = [...filterBuffer];
  let comparePlotData = {};
  let jobIds = [];
  const sorting = { field: "startTime", type: "col", order: "DESC" };

  /*uPlot*/
  let plotSync = uPlot.sync("compareJobsView");

  /* GQL */

  const client = getContextClient();
  const client = getContextClient();
  // Pull All Series For Metrics Statistics Only On Node Scope
  const compareQuery = gql`
    query ($filter: [JobFilter!]!, $metrics: [String!]!) {
@@ -52,6 +54,9 @@
        jobId
        startTime
        duration
        numNodes
        numHWThreads
        numAccelerators
        stats {
          name
          data {
@@ -111,11 +116,13 @@

  function jobs2uplot(jobs, metrics) {
    // Prep
    // Resources Init
    comparePlotData['resources'] = {unit:'', data: [[],[],[],[],[],[]]} // data: [X, XST, XRT, YNODES, YTHREADS, YACCS]
    // Metric Init
    for (let m of metrics) {
      // Get Unit
      const rawUnit = globalMetrics.find((gm) => gm.name == m)?.unit
      const metricUnit = (rawUnit?.prefix ? rawUnit.prefix : "") + (rawUnit?.base ? rawUnit.base : "")
      // Init
      comparePlotData[m] = {unit: metricUnit, data: [[],[],[],[],[],[]]} // data: [X, XST, XRT, YMIN, YAVG, YMAX]
    }
@@ -123,7 +130,16 @@
    if (jobs) {
      let plotIndex = 0
      jobs.forEach((j) => {
        // Collect JobIDs for X-Ticks
        jobIds.push(j.jobId)
        // Resources
        comparePlotData['resources'].data[0].push(plotIndex)
        comparePlotData['resources'].data[1].push(j.startTime)
        comparePlotData['resources'].data[2].push(j.duration)
        comparePlotData['resources'].data[3].push(j.numNodes)
        comparePlotData['resources'].data[4].push(j?.numHWThreads?j.numHWThreads:0)
        comparePlotData['resources'].data[5].push(j?.numAccelerators?j.numAccelerators:0)
        // Metrics
        for (let s of j.stats) {
          comparePlotData[s.name].data[0].push(plotIndex)
          comparePlotData[s.name].data[1].push(j.startTime)
@@ -181,16 +197,34 @@
    </Col>
  </Row>
{:else}
  <Row>
    <Col>
      <Comparogram
        title={'Compare Resources'}
        xlabel="JobIDs"
        xticks={jobIds}
        ylabel={'Resource Counts'}
        data={comparePlotData['resources'].data}
        {plotSync}
        forResources
      />
    </Col>
  </Row>
  {#each metrics as m}
    <Comparogram
      title={'Compare '+ m}
      xlabel="JobIds"
      xticks={jobIds}
      ylabel={m}
      metric={m}
      yunit={comparePlotData[m].unit}
      data={comparePlotData[m].data}
    />
    <Row>
      <Col>
        <Comparogram
          title={`Compare Metric '${m}'`}
          xlabel="JobIDs"
          xticks={jobIds}
          ylabel={m}
          metric={m}
          yunit={comparePlotData[m].unit}
          data={comparePlotData[m].data}
          {plotSync}
        />
      </Col>
    </Row>
  {/each}
  <hr/><hr/>
  {#each $compareData.data.jobsMetricStats as job, jindex (job.jobId)}
@@ -14,24 +14,24 @@

<script>
  import uPlot from "uplot";
  import { roundTwoDigits, formatTime } from "../units.js";
  import { roundTwoDigits, formatTime, formatNumber } from "../units.js";
  import { getContext, onMount, onDestroy } from "svelte";
  import { Card } from "@sveltestrap/sveltestrap";

  export let metric;
  export let metric = "";
  export let width = 0;
  export let height = 300;
  export let data;
  export let xlabel;
  export let xticks;
  export let ylabel;
  export let yunit;
  export let title;
  // export let cluster = "";
  // export let subCluster = "";
  export let data = null;
  export let xlabel = "";
  export let xticks = [];
  export let ylabel = "";
  export let yunit = "";
  export let title = "";
  export let forResources = false;
  export let plotSync;

  // NOTE: Metric Thresholds non-required, Cluster Mixing Allowed

  const metricConfig = null // DEBUG FILLER
  // const metricConfig = getContext("getMetricConfig")(cluster, subCluster, metric); // Args from where?
  const clusterCockpitConfig = getContext("cc-config");
  const lineWidth = clusterCockpitConfig.plot_general_lineWidth / window.devicePixelRatio;
  const cbmode = clusterCockpitConfig?.plot_general_colorblindMode || false;
@@ -80,9 +80,6 @@
    overEl.addEventListener("mouseleave", () => {
      legendEl.style.display = "none";
    });

    // let tooltip exit plot
    // overEl.style.overflow = "visible";
  }

  function update(u) {
@@ -100,19 +97,6 @@
    };
  }

  let maxY = null;
  // TODO: Helpful!
  // if (metricConfig !== null) {
  //   maxY = data[3].reduce( // Data[3] is JobMaxs
  //     (max, x) => Math.max(max, x),
  //     metricConfig.normal,
  //   ) || metricConfig.normal
  //   if (maxY >= 10 * metricConfig.peak) {
  //     // Hard y-range render limit if outliers in series data
  //     maxY = 10 * metricConfig.peak;
  //   }
  // }

  const plotSeries = [
    {
      label: "JobID",
@@ -135,34 +119,62 @@
        return formatTime(ts);
      },
    },
    {
      label: "Min",
      scale: "y",
      width: lineWidth,
      stroke: cbmode ? "rgb(0,255,0)" : "red",
      value: (u, ts, sidx, didx) => {
        return `${roundTwoDigits(ts)} ${yunit}`;
  ]

  if (forResources) {
    const resSeries = [
      {
        label: "Nodes",
        scale: "y",
        width: lineWidth,
        stroke: "black",
      },
    },
    {
      label: "Avg",
      scale: "y",
      width: lineWidth,
      stroke: "black",
      value: (u, ts, sidx, didx) => {
        return `${roundTwoDigits(ts)} ${yunit}`;
      {
        label: "Threads",
        scale: "y",
        width: lineWidth,
        stroke: "rgb(0,0,255)",
      },
    },
    {
      label: "Max",
      scale: "y",
      width: lineWidth,
      stroke: cbmode ? "rgb(0,0,255)" : "green",
      value: (u, ts, sidx, didx) => {
        return `${roundTwoDigits(ts)} ${yunit}`;
      {
        label: "Accelerators",
        scale: "y",
        width: lineWidth,
        stroke: cbmode ? "rgb(0,255,0)" : "red",
      }
    ];
    plotSeries.push(...resSeries)
  } else {
    const statsSeries = [
      {
        label: "Min",
        scale: "y",
        width: lineWidth,
        stroke: cbmode ? "rgb(0,255,0)" : "red",
        value: (u, ts, sidx, didx) => {
          return `${roundTwoDigits(ts)} ${yunit}`;
        },
      },
    }
  ];
      {
        label: "Avg",
        scale: "y",
        width: lineWidth,
        stroke: "black",
        value: (u, ts, sidx, didx) => {
          return `${roundTwoDigits(ts)} ${yunit}`;
        },
      },
      {
        label: "Max",
        scale: "y",
        width: lineWidth,
        stroke: cbmode ? "rgb(0,0,255)" : "green",
        value: (u, ts, sidx, didx) => {
          return `${roundTwoDigits(ts)} ${yunit}`;
        },
      }
    ];
    plotSeries.push(...statsSeries)
  };

  const plotBands = [
    { series: [5, 4], fill: cbmode ? "rgba(0,0,255,0.1)" : "rgba(0,255,0,0.1)" },
@@ -198,19 +210,20 @@
        scale: "y",
        grid: { show: true },
        labelFont: "sans-serif",
        label: ylabel + (yunit ? ` (${yunit})` : '')
        label: ylabel + (yunit ? ` (${yunit})` : ''),
        values: (u, vals) => vals.map((v) => formatNumber(v)),
      },
    ],
    bands: plotBands,
    padding: [5, 10, 0, 0], // 5, 10, -20, 0
    bands: forResources ? [] : plotBands,
    padding: [5, 10, 0, 0],
    hooks: {
      draw: [
        (u) => {
          // Draw plot type label:
          let textl = "Metric Min/Avg/Max for Job Duration";
          let textl = forResources ? "Job Resources by Type" : "Metric Min/Avg/Max for Job Duration";
          let textr = "Earlier <- StartTime -> Later";
          u.ctx.save();
          u.ctx.textAlign = "start"; // 'end'
          u.ctx.textAlign = "start";
          u.ctx.fillStyle = "black";
          u.ctx.fillText(textl, u.bbox.left + 10, u.bbox.top + 10);
          u.ctx.textAlign = "end";
@@ -220,24 +233,8 @@
            u.bbox.left + u.bbox.width - 10,
            u.bbox.top + 10,
          );
          // u.ctx.fillText(text, u.bbox.left + u.bbox.width - 10, u.bbox.top + u.bbox.height - 10) // Recipe for bottom right

          if (!metricConfig) {
            u.ctx.restore();
            return;
          }

          // TODO: Needs MetricConf
          let y = u.valToPos(metricConfig?.normal, "y", true);
          u.ctx.save();
          u.ctx.lineWidth = lineWidth;
          u.ctx.strokeStyle = "#000000"; // Black
          u.ctx.setLineDash([5, 5]);
          u.ctx.beginPath();
          u.ctx.moveTo(u.bbox.left, y);
          u.ctx.lineTo(u.bbox.left + u.bbox.width, y);
          u.ctx.stroke();
          u.ctx.restore();
          return;
        },
      ]
    },
@@ -245,7 +242,7 @@
      x: { time: false },
      xst: { time: false },
      xrt: { time: false },
      y: maxY ? { min: 0, max: (maxY * 1.1) } : {auto: true}, // Add some space to upper render limit
      y: {auto: true, distr: forResources ? 3 : 1},
    },
    legend: {
      // Display legend
@@ -254,6 +251,10 @@
    },
    cursor: {
      drag: { x: true, y: true },
      sync: {
        key: plotSync.key,
        scales: ["x", null],
      }
    }
  };
@@ -267,6 +268,7 @@
      opts.width = ren_width;
      opts.height = ren_height;
      uplot = new uPlot(opts, data, plotWrapper); // Data is uplot formatted [[X][Ymin][Yavg][Ymax]]
      plotSync.sub(uplot)
    } else {
      uplot.setSize({ width: ren_width, height: ren_height });
    }