Iterate setup script, remove start script, update migrateTimestamps script

Christoph Kluge 2022-06-23 18:05:08 +02:00
parent f7914b0291
commit 46c1d90aab
3 changed files with 42 additions and 147 deletions

View File

@@ -15,7 +15,7 @@ use REST::Client;
 my $localtime = localtime;
 my $epochtime = $localtime->epoch;
 my $archiveTarget = './cc-backend/var/job-archive';
-my $archiveSrc = './data/job-archive';
+my $archiveSrc = './data/job-archive-source';
 my @ArchiveClusters;
 # Get clusters by job-archive/$subfolder
@@ -29,7 +29,7 @@ while ( readdir $dh ) {
 # start for jobarchive
 foreach my $cluster ( @ArchiveClusters ) {
-    print "Starting to update startTime in job-archive for $cluster\n";
+    print "Starting to update start- and stoptimes in job-archive for $cluster\n";
     opendir my $dhLevel1, "$archiveSrc/$cluster" or die "can't open directory: $!";
     while ( readdir $dhLevel1 ) {
@@ -55,7 +55,7 @@ foreach my $cluster ( @ArchiveClusters ) {
            }
            # check if subfolder contains file, else remove source and skip
            if ( ! -e "$jobSource/meta.json") {
-               rmtree $jobOrigin;
+               # rmtree $jobOrigin;
                next;
            }
@@ -92,13 +92,13 @@ foreach my $cluster ( @ArchiveClusters ) {
        }
    }
 }
 print "Done for job-archive\n";
-sleep(2);
+sleep(1);
 ## CHECKPOINTS
 chomp(my $checkpointStart=`date --date 'TZ="Europe/Berlin" 0:00 7 days ago' +%s`);
 my $halfday = 43200;
-my $checkpTarget = './data/cc-metric-store_new';
-my $checkpSource = './data/cc-metric-store';
+my $checkpTarget = './data/cc-metric-store/checkpoints';
+my $checkpSource = './data/cc-metric-store-source/checkpoints';
 my @CheckpClusters;
 # Get clusters by cc-metric-store/$subfolder
@@ -112,7 +112,7 @@ while ( readdir $dhc ) {
 # start for checkpoints
 foreach my $cluster ( @CheckpClusters ) {
-    print "Starting to update startTime in checkpoint-files for $cluster\n";
+    print "Starting to update checkpoint filenames and data starttimes for $cluster\n";
     opendir my $dhLevel1, "$checkpSource/$cluster" or die "can't open directory: $!";
     while ( readdir $dhLevel1 ) {
@@ -169,85 +169,3 @@ foreach my $cluster ( @CheckpClusters ) {
    }
 }
 print "Done for checkpoints\n";
-sleep(2);
-### INFLUXDB
-my $newCheckpoints = './data/cc-metric-store_new';
-my $verbose = 1;
-my $restClient = REST::Client->new();
-$restClient->setHost('http://localhost:8087');
-$restClient->addHeader('Authorization', "Token 74008ea2a8dad5e6f856838a90c6392e");
-$restClient->addHeader('Content-Type', 'text/plain; charset=utf-8');
-$restClient->addHeader('Accept', 'application/json');
-$restClient->getUseragent()->ssl_opts(SSL_verify_mode => 0); # Temporary: Disable Cert Check
-$restClient->getUseragent()->ssl_opts(verify_hostname => 0); # Temporary: Disable Cert Check
-# Get clusters by folder: Reuse from above
-# start to read checkpoints for influx
-foreach my $cluster ( @CheckpClusters ) {
-    print "Starting to read checkpoint-files into influx for $cluster\n";
-    opendir my $dhLevel1, "$newCheckpoints/$cluster" or die "can't open directory: $!";
-    while ( readdir $dhLevel1 ) {
-        chomp; next if $_ eq '.' or $_ eq '..';
-        my $level1 = $_;
-        if ( -d "$newCheckpoints/$cluster/$level1" ) {
-            my $nodeSource = "$newCheckpoints/$cluster/$level1/";
-            my @files = read_dir($nodeSource);
-            my $length = @files;
-            if (!@files || $length != 14) { # needs 14 files == 7 days worth of data
-                next;
-            }
-            my @sortedFiles = sort { versioncmp($a,$b) } @files; # sort alphanumerically: _Really_ start with index == 0 == 1609459200.json
-            my $nodeMeasurement;
-            foreach my $file (@sortedFiles) {
-                # print "$file\n";
-                my $rawstr = read_file("$nodeSource/$file");
-                my $json = decode_json($rawstr);
-                my $fileMeasurement;
-                foreach my $metric (keys %{$json->{metrics}}) {
-                    my $start = $json->{metrics}->{$metric}->{start};
-                    my $timestep = $json->{metrics}->{$metric}->{frequency};
-                    my $data = $json->{metrics}->{$metric}->{data};
-                    my $length = @$data;
-                    my $measurement;
-                    while (my ($index, $value) = each(@$data)) {
-                        if ($value) {
-                            my $timestamp = $start + ($timestep * $index);
-                            $measurement .= "$metric,cluster=$cluster,hostname=$level1,type=node value=".$value." $timestamp"."\n";
-                        }
-                    }
-                    # Use v2 API for Influx2
-                    if ($measurement) {
-                        # print "Adding: #VALUES $length KEY $metric"."\n";
-                        $fileMeasurement .= $measurement;
-                    }
-                }
-                if ($fileMeasurement) {
-                    $nodeMeasurement .= $fileMeasurement;
-                }
-            }
-            $restClient->POST("/api/v2/write?org=ClusterCockpit&bucket=ClusterCockpit&precision=s", "$nodeMeasurement");
-            my $responseCode = $restClient->responseCode();
-            if ( $responseCode eq '204') {
-                if ( $verbose ) {
-                    print "INFLUX API WRITE: CLUSTER $cluster HOST $level1"."\n";
-                };
-            } else {
-                if ( $responseCode ne '422' ) { # Exclude High Frequency Error 422 - Temporary!
-                    my $response = $restClient->responseContent();
-                    print "INFLUX API WRITE ERROR CODE ".$responseCode.": ".$response."\n";
-                };
-            };
-        }
-    }
-}
-print "Done for influx\n";

View File

@@ -18,73 +18,48 @@ else
 fi
-# Download unedited job-archibe to /data
-if [ ! -d data/job-archive ]; then
+# Download unedited job-archive to ./data/job-archive-source
+if [ ! -d data/job-archive-source ]; then
     cd data
     wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/job-archive.tar.xz
     tar xJf job-archive.tar.xz
+    mv ./job-archive ./job-archive-source
     rm ./job-archive.tar.xz
     cd ..
+else
+    echo "'data/job-archive-source' already exists!"
 fi
+# Download unedited checkpoint files to ./data/cc-metric-store-source/checkpoints
+if [ ! -d data/cc-metric-store-source ]; then
+    mkdir -p data/cc-metric-store-source/checkpoints
+    cd data/cc-metric-store-source/checkpoints
+    wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/cc-metric-store-checkpoints.tar.xz
+    tar xf cc-metric-store-checkpoints.tar.xz
+    rm cc-metric-store-checkpoints.tar.xz
+    cd ../../../
+else
+    echo "'data/cc-metric-store-source' already exists!"
+fi
+# Update timestamps
+perl ./migrateTimestamps.pl
-# Download data for influxdb2
+# Create archive folder for rewritten ccms checkpoints
+if [ ! -d data/cc-metric-store/archive ]; then
+    mkdir -p data/cc-metric-store/archive
+fi
+# cleanup sources
+# rm -r ./data/job-archive-source
+# rm -r ./data/cc-metric-store-source
+# prepare folders for influxdb2
 if [ ! -d data/influxdb ]; then
     mkdir -p data/influxdb/data
-    cd data/influxdb/data
-    wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/influxdbv2-data.tar.xz
-    tar xJf influxdbv2-data.tar.xz
-    rm influxdbv2-data.tar.xz
-    cd ../../../
+    mkdir -p data/influxdb/config/influx-configs
 else
     echo "'data/influxdb' already exists!"
-    echo -n "Remove existing folder and redownload? [yes to redownload / no to continue] "
-    read -r answer
-    if [ "$answer" == "yes" ]; then
-        echo "Removing 'data/influxdb' ..."
-        rm -rf data/influxdb
-        echo "Reinstall 'data/influxdb'..."
-        mkdir -p data/influxdb/data
-        cd data/influxdb/data
-        wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/influxdbv2-data.tar.xz
-        tar xJf influxdbv2-data.tar.xz
-        rm influxdbv2-data.tar.xz
-        cd ../../../
-        echo "done."
-    else
-        echo "'data/influxdb' unchanged."
-    fi
-fi
-# Download checkpoint files for cc-metric-store
-if [ ! -d data/cc-metric-store ]; then
-    mkdir -p data/cc-metric-store/checkpoints
-    mkdir -p data/cc-metric-store/archive
-    cd data/cc-metric-store/checkpoints
-    wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/cc-metric-store-checkpoints.tar.xz
-    tar xf cc-metric-store-checkpoints.tar.xz
-    rm cc-metric-store-checkpoints.tar.xz
-    cd ../../../
-else
-    echo "'data/cc-metric-store' already exists!"
-    echo -n "Remove existing folder and redownload? [yes to redownload / no to continue] "
-    read -r answer
-    if [ "$answer" == "yes" ]; then
-        echo "Removing 'data/cc-metric-store' ..."
-        rm -rf data/cc-metric-store
-        echo "Reinstall 'data/cc-metric-store'..."
-        mkdir -p data/cc-metric-store/checkpoints
-        mkdir -p data/cc-metric-store/archive
-        cd data/cc-metric-store/checkpoints
-        wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/cc-metric-store-checkpoints.tar.xz
-        tar xf cc-metric-store-checkpoints.tar.xz
-        rm cc-metric-store-checkpoints.tar.xz
-        cd ../../../
-        echo "done."
-    else
-        echo "'data/cc-metric-store' unchanged."
-    fi
 fi
 # Check dotenv-file and docker-compose-yml, copy accordingly if not present and build docker services
@@ -98,6 +73,11 @@ if [ ! -d docker-compose.yml ]; then
 fi
 docker-compose build
+./cc-backend/cc-backend --init-db --add-user demo:admin:AdminDev --no-server
+docker-compose up -d
 echo ""
-echo "Setup complete. Use 'startDev.sh' to boot containers and start cc-backend."
+echo "Setup complete, containers are up by default: Shut down with 'docker-compose down'."
+echo "Use './cc-backend/cc-backend' to start cc-backend."
+echo "Use scripts in /scripts to load data into influx or mariadb."
+# ./cc-backend/cc-backend

View File

@@ -1,3 +0,0 @@
-#!/bin/bash
-docker-compose up -d