Continue fixing errors

This commit is contained in:
Jan Eitzinger 2023-05-04 09:19:43 +02:00
parent f235b5e911
commit 518f4664a1
4 changed files with 224 additions and 199 deletions

View File

@ -6,7 +6,7 @@ import terser from '@rollup/plugin-terser';
import css from 'rollup-plugin-css-only';
// const production = !process.env.ROLLUP_WATCH;
const production = true
const production = false
const plugins = [
svelte({

View File

@ -9,6 +9,7 @@
import Filters from "./filters/Filters.svelte";
import { queryStore, gql, getContextClient } from "@urql/svelte";
import { scramble, scrambleNames } from "./joblist/JobInfo.svelte";
import { UniqueInputFieldNamesRule } from "graphql";
const {} = init();
@ -20,7 +21,9 @@
"Invalid list type provided!"
);
const stats = queryStore({
let filters;
$: stats = queryStore({
client: getContextClient(),
query: gql`
query($filter: [JobFilter!]!) {
@ -32,10 +35,10 @@
totalCoreHours
}
}`,
variables: { filter, type },
variables: { filters },
pause: true
});
let filters;
let nameFilter = "";
let sorting = { field: "totalJobs", direction: "down" };
@ -88,9 +91,8 @@
startTimeQuickSelect={true}
menuText="Only {type.toLowerCase()}s with jobs that match the filters will show up"
on:update={({ detail }) => {
$stats.variables = { filter: detail.filters };
$stats.context.pause = false;
$stats.reexecute();
filters = detail.filters;
stats.resume();
}}
/>
</Col>

View File

@ -81,8 +81,6 @@
$jobs.reexecute({ requestPolicy: 'network-only' })
}
query(jobs)
let tableWidth = null
let jobInfoColumnWidth = 250
$: plotWidth = Math.floor((tableWidth - jobInfoColumnWidth) / metrics.length - 10)

View File

@ -1,7 +1,11 @@
import { expiringCacheExchange } from './cache-exchange.js'
import { Client, setContextClient, fetchExchange } from '@urql/svelte';
import { setContext, getContext, hasContext, onDestroy, tick } from 'svelte'
import { readable } from 'svelte/store'
import { expiringCacheExchange } from "./cache-exchange.js";
import {
Client,
setContextClient,
fetchExchange,
} from "@urql/svelte";
import { setContext, getContext, hasContext, onDestroy, tick } from "svelte";
import { readable } from "svelte/store";
/*
* Call this function only at component initialization time!
@ -13,28 +17,29 @@ import { readable } from 'svelte/store'
* - Adds 'clusters' to the context (object with cluster names as keys)
* - Adds 'metrics' to the context, a function that takes a cluster and metric name and returns the MetricConfig (or undefined)
*/
export function init(extraInitQuery = '') {
const jwt = hasContext('jwt')
? getContext('jwt')
: getContext('cc-config')['jwt']
export function init(extraInitQuery = "") {
const jwt = hasContext("jwt")
? getContext("jwt")
: getContext("cc-config")["jwt"];
const client = new Client({
url: `${window.location.origin}/query`,
fetchOptions: jwt != null
? { headers: { 'Authorization': `Bearer ${jwt}` } } : {},
exchanges: [
dedupExchange,
expiringCacheExchange({
ttl: 5 * 60 * 1000,
maxSize: 150,
}),
fetchExchange
]
})
const client = new Client({
url: `${window.location.origin}/query`,
fetchOptions:
jwt != null ? { headers: { Authorization: `Bearer ${jwt}` } } : {},
exchanges: [
expiringCacheExchange({
ttl: 5 * 60 * 1000,
maxSize: 150,
}),
fetchExchange,
],
});
setContextClient(client)
setContextClient(client);
const query = client.query(`query {
const query = client
.query(
`query {
clusters {
name,
metricConfig {
@ -62,227 +67,247 @@ export function init(extraInitQuery = '') {
}
tags { id, name, type }
${extraInitQuery}
}`).toPromise()
}`
)
.toPromise();
let state = { fetching: true, error: null, data: null }
let subscribers = []
const subscribe = (callback) => {
callback(state)
subscribers.push(callback)
return () => {
subscribers = subscribers.filter(cb => cb != callback)
}
let state = { fetching: true, error: null, data: null };
let subscribers = [];
const subscribe = (callback) => {
callback(state);
subscribers.push(callback);
return () => {
subscribers = subscribers.filter((cb) => cb != callback);
};
};
const tags = [], clusters = []
setContext('tags', tags)
setContext('clusters', clusters)
setContext('metrics', (cluster, metric) => {
if (typeof cluster !== 'object')
cluster = clusters.find(c => c.name == cluster)
const tags = [],
clusters = [];
setContext("tags", tags);
setContext("clusters", clusters);
setContext("metrics", (cluster, metric) => {
if (typeof cluster !== "object")
cluster = clusters.find((c) => c.name == cluster);
return cluster.metricConfig.find(m => m.name == metric)
})
setContext('on-init', callback => state.fetching
? subscribers.push(callback)
: callback(state))
setContext('initialized', readable(false, (set) =>
subscribers.push(() => set(true))))
return cluster.metricConfig.find((m) => m.name == metric);
});
setContext("on-init", (callback) =>
state.fetching ? subscribers.push(callback) : callback(state)
);
setContext(
"initialized",
readable(false, (set) => subscribers.push(() => set(true)))
);
query.then(({ error, data }) => {
state.fetching = false
if (error != null) {
console.error(error)
state.error = error
tick().then(() => subscribers.forEach(cb => cb(state)))
return
}
for (let tag of data.tags)
tags.push(tag)
for (let cluster of data.clusters)
clusters.push(cluster)
state.data = data
tick().then(() => subscribers.forEach(cb => cb(state)))
})
return {
query: { subscribe },
tags,
clusters,
query.then(({ error, data }) => {
state.fetching = false;
if (error != null) {
console.error(error);
state.error = error;
tick().then(() => subscribers.forEach((cb) => cb(state)));
return;
}
for (let tag of data.tags) tags.push(tag);
for (let cluster of data.clusters) clusters.push(cluster);
state.data = data;
tick().then(() => subscribers.forEach((cb) => cb(state)));
});
return {
query: { subscribe },
tags,
clusters,
};
}
/**
 * Format a number with a metric suffix (k / M / G) for display.
 * E.g. 1500 -> "1.5 k", 2500000 -> "2.5 M", 3000000000 -> "3 G".
 * Values below 1000 keep an empty suffix (so the result has a trailing space,
 * matching the existing template `${value} ${suffix}`).
 * @param {number} x - the value to format
 * @returns {string} value rounded to 2 decimals, a space, and the suffix
 */
export function formatNumber(x) {
  let suffix = "";
  if (x >= 1000000000) {
    // bugfix: previously divided by 1e6 here, rendering 3e9 as "3000 G"
    x /= 1000000000;
    suffix = "G";
  } else if (x >= 1000000) {
    x /= 1000000;
    suffix = "M";
  } else if (x >= 1000) {
    x /= 1000;
    suffix = "k";
  }

  return `${Math.round(x * 100) / 100} ${suffix}`;
}
/**
 * Deep-copy a JSON-serializable value via a JSON round-trip.
 * Limitations: drops undefined/functions/Symbols, mangles Dates/Maps/Sets,
 * and throws on cycles. Consider structuredClone() where available.
 * @param {*} x - any JSON-serializable value
 * @returns {*} an independent deep copy of x
 */
export function deepCopy(x) {
  return JSON.parse(JSON.stringify(x));
}
/**
 * Case-insensitive substring match.
 * @param {string} term - search term; expected to already be lowercase
 *   (only the haystack is lowercased here)
 * @param {string} string - haystack to search in
 * @returns {boolean} true if `string` contains `term`
 */
function fuzzyMatch(term, string) {
  return string.toLowerCase().includes(term);
}
/**
 * Search a tag list with an optional "type:name" syntax.
 * - empty / whitespace-only term: all tags
 * - "foo": tags whose type OR name contains "foo"
 * - "type:name": tags whose type contains the first part AND name the second
 * @param {string} term - search term, optionally "type:name"
 * @param {Array<{type: string, name: string}>} tags - tags to search (may be null)
 * @returns {Array} matching tags sorted by type, then name
 */
export function fuzzySearchTags(term, tags) {
  if (!tags) return [];

  let results = [];
  let termparts = term
    .split(":")
    .map((s) => s.trim())
    .filter((s) => s.length > 0);

  if (termparts.length == 0) {
    results = tags.slice();
  } else if (termparts.length == 1) {
    for (let tag of tags)
      if (
        fuzzyMatch(termparts[0], tag.type) ||
        fuzzyMatch(termparts[0], tag.name)
      )
        results.push(tag);
  } else if (termparts.length == 2) {
    for (let tag of tags)
      if (
        fuzzyMatch(termparts[0], tag.type) &&
        fuzzyMatch(termparts[1], tag.name)
      )
        results.push(tag);
  }

  return results.sort((a, b) => {
    if (a.type < b.type) return -1;
    if (a.type > b.type) return 1;
    if (a.name < b.name) return -1;
    if (a.name > b.name) return 1;
    return 0;
  });
}
/**
 * Group job metrics by metric name, preserving encounter order within a group.
 * @param {Array<{name: string}>} jobMetrics - flat list of metric entries
 * @returns {Array<Array>} one array per distinct metric name, sorted by name
 */
export function groupByScope(jobMetrics) {
  let metrics = new Map();
  for (let metric of jobMetrics) {
    if (metrics.has(metric.name)) metrics.get(metric.name).push(metric);
    else metrics.set(metric.name, [metric]);
  }

  return [...metrics.values()].sort((a, b) =>
    a[0].name.localeCompare(b[0].name)
  );
}
// Relative coarseness of each metric scope: higher value = coarser scope.
// Used by maxScope/minScope to pick the coarsest/finest scope from a list.
const scopeGranularity = {
  node: 10,
  socket: 5,
  accelerator: 5,
  core: 2,
  hwthread: 1,
};
/**
 * Return the coarsest scope from a non-empty list of known scope names.
 * Asserts (non-fatally, via console.assert) that every scope is known.
 * @param {string[]} scopes - scope names, keys of scopeGranularity
 * @returns {string} the scope with the highest granularity value
 */
export function maxScope(scopes) {
  console.assert(
    scopes.length > 0 && scopes.every((x) => scopeGranularity[x] != null)
  );
  let sm = scopes[0],
    gran = scopeGranularity[scopes[0]];
  for (let scope of scopes) {
    let otherGran = scopeGranularity[scope];
    if (otherGran > gran) {
      sm = scope;
      gran = otherGran;
    }
  }

  return sm;
}
/**
 * Return the finest scope from a non-empty list of known scope names.
 * Asserts (non-fatally, via console.assert) that every scope is known.
 * @param {string[]} scopes - scope names, keys of scopeGranularity
 * @returns {string} the scope with the lowest granularity value
 */
export function minScope(scopes) {
  console.assert(
    scopes.length > 0 && scopes.every((x) => scopeGranularity[x] != null)
  );
  let sm = scopes[0],
    gran = scopeGranularity[scopes[0]];
  for (let scope of scopes) {
    let otherGran = scopeGranularity[scope];
    if (otherGran < gran) {
      sm = scope;
      gran = otherGran;
    }
  }

  return sm;
}
/**
 * Fetch metric data for a job from the REST API.
 * @param {{id: *, monitoringStatus: number}} job - job to fetch metrics for
 * @param {?string[]} metrics - metric names to request, or null for all
 * @param {?string[]} scopes - scopes to request, or null for all
 * @returns {Promise<?object>} parsed JSON response; null when the job has
 *   monitoring disabled (monitoringStatus == 0); `{ error }` on HTTP or
 *   network failure — callers must check for `.error`, this never throws
 */
export async function fetchMetrics(job, metrics, scopes) {
  if (job.monitoringStatus == 0) return null;

  let query = [];
  if (metrics != null) {
    for (let metric of metrics) {
      query.push(`metric=${metric}`);
    }
  }

  if (scopes != null) {
    for (let scope of scopes) {
      query.push(`scope=${scope}`);
    }
  }

  try {
    let res = await fetch(
      `/api/jobs/metrics/${job.id}${query.length > 0 ? "?" : ""}${query.join(
        "&"
      )}`
    );
    if (res.status != 200) {
      return { error: { status: res.status, message: await res.text() } };
    }

    return await res.json();
  } catch (e) {
    return { error: e };
  }
}
/**
 * Create a svelte-readable store around fetchMetrics plus a trigger function.
 * The trigger fetches metrics for a job and pushes the result into the store;
 * successive successful fetches accumulate jobMetrics (new results keep the
 * previous ones appended).
 * @returns {[import('svelte/store').Readable, Function]} [store, fetch trigger]
 */
export function fetchMetricsStore() {
  let set = null;
  let prev = { fetching: true, error: null, data: null };
  return [
    readable(prev, (_set) => {
      set = _set;
    }),
    (job, metrics, scopes) =>
      fetchMetrics(job, metrics, scopes).then((res) => {
        let next = { fetching: false, error: res.error, data: res.data };
        if (prev.data && next.data)
          next.data.jobMetrics.push(...prev.data.jobMetrics);

        prev = next;
        set(next);
      }),
  ];
}
/**
 * Keep a datatable header visually below the fixed navbar while scrolling.
 * Listens to document scroll (rAF-throttled via the `ticking` flag) and calls
 * updatePading with the padding needed so the table header clears the navbar.
 * Must be called during component init: registers onDestroy cleanup.
 * @param {string} datatableHeaderSelector - CSS selector for the table header
 * @param {Function} updatePading - callback receiving the padding in px
 *   (NOTE(review): name is misspelled upstream; kept for caller compatibility)
 */
export function stickyHeader(datatableHeaderSelector, updatePading) {
  const header = document.querySelector("header > nav.navbar");
  if (!header) return;

  let ticking = false,
    datatableHeader = null;
  const onscroll = (event) => {
    if (ticking) return;

    ticking = true;
    window.requestAnimationFrame(() => {
      ticking = false;
      if (!datatableHeader)
        datatableHeader = document.querySelector(datatableHeaderSelector);

      const top = datatableHeader.getBoundingClientRect().top;
      updatePading(
        top < header.clientHeight ? header.clientHeight - top + 10 : 10
      );
    });
  };

  document.addEventListener("scroll", onscroll);
  onDestroy(() => document.removeEventListener("scroll", onscroll));
}