mirror of https://github.com/ClusterCockpit/cc-docker.git
synced 2024-11-10 09:47:27 +01:00
Reduce unused code
This commit is contained in:
parent dde48d2394
commit 63552dd52b
@@ -4,66 +4,21 @@ use warnings;
 use utf8;
 
 use File::Path qw( make_path rmtree );
-use String::CamelCase qw(camelize);
 use Cpanel::JSON::XS qw( decode_json encode_json );
 use File::Slurp;
 use Data::Dumper;
-use Data::Walk;
-use Scalar::Util qw( reftype );
 use Time::Piece;
 use Sort::Versions;
-
-## NOTE: Based on Jan: migrateCC-jobArchive.pl
-
-my $FIRST=1;
-my @METRICS = ('flops_any', 'cpu_load', 'mem_used', 'flops_sp',
-    'flops_dp', 'mem_bw', 'cpi', 'cpi_avg', 'clock', 'rapl_power');
-
-my %UNITS = (
-    'flops_any' => 'GF/s',
-    'cpu_load' => 'load',
-    'mem_used' => 'GB',
-    'flops_sp' => 'GF/s',
-    'flops_dp' => 'GF/s',
-    'mem_bw' => 'GB/s',
-    'clock' => 'MHz',
-    'rapl_power' => 'W'
-);
-
-sub process {
-    if ( $Data::Walk::type eq 'HASH' && !($Data::Walk::index%2)) {
-
-        if ( ! $FIRST ) {
-            my $key = $_;
-            if ( ! grep( /^$key$/, @METRICS) ) {
-                my $str = lcfirst(camelize($key));
-                my $hashref = $Data::Walk::container;
-                my $value = delete ${$hashref}{$key};
-                ${$hashref}{$str} = $value;
-            }
-        }
-
-        if ( $FIRST ) {
-            $FIRST = 0;
-        }
-    }
-}
-
+### JOB-ARCHIVE
 my $localtime = localtime;
 my $epochtime = $localtime->epoch;
 my $targetDir = './cc-backend/var/job-archive';
 my @Clusters;
 my $src = './data/job-archive';
 
-chomp(my $checkpointStart=`date --date 'TZ="Europe/Berlin" 0:00 7 days ago' +%s`);
-my $halfday = 43200;
-my $targetDirCheckpoints = './data/cc-metric-store_new';
-my $srcCheckpoints = './data/cc-metric-store';
-my @ClustersCheckpoints;
-
-## Get Clusters
+# Get clusters by folder
 opendir my $dh, $src or die "can't open directory: $!";
 
 while ( readdir $dh ) {
     chomp; next if $_ eq '.' or $_ eq '..' or $_ eq 'job-archive';
 
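Note: the block removed above (the metric list, the %UNITS table and the Data::Walk process() callback) existed only to rename snake_case JSON keys to camelCase. A minimal sketch of that transformation, for reference; the sample key is hypothetical:

# Sketch (not part of this patch): the renaming the removed process()
# callback applied to non-metric hash keys; the sample key is made up.
use strict;
use warnings;
use String::CamelCase qw(camelize);

my $key   = 'mem_used_max';              # hypothetical snake_case key
my $camel = lcfirst(camelize($key));     # 'mem_used_max' -> 'memUsedMax'
print "$key -> $camel\n";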
@@ -71,15 +26,6 @@ while ( readdir $dh ) {
     push @Clusters, $cluster;
 }
 
-opendir my $dhc, $srcCheckpoints or die "can't open directory: $!";
-
-while ( readdir $dhc ) {
-    chomp; next if $_ eq '.' or $_ eq '..' or $_ eq 'job-archive';
-
-    my $cluster = $_;
-    push @ClustersCheckpoints, $cluster;
-}
-
 # start for jobarchive
 foreach my $cluster ( @Clusters ) {
     print "Starting to update startTime in job-archive for $cluster\n";
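Note: the checkpoint cluster discovery removed here is not dropped; it reappears in the last hunk below. As a standalone sketch of the folder-based discovery pattern the script uses (the -d filter is an addition for illustration only):

# Sketch (assumption: one sub-directory per cluster under the source path;
# the -d test is extra and not in the original script).
use strict;
use warnings;

my $src = './data/job-archive';
opendir my $dh, $src or die "can't open directory: $!";
my @Clusters = grep { $_ ne '.' && $_ ne '..' && $_ ne 'job-archive' && -d "$src/$_" }
               readdir $dh;
closedir $dh;
print "found cluster: $_\n" for @Clusters;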
@@ -114,9 +60,6 @@ foreach my $cluster ( @Clusters ) {
             my $str = read_file("$src/meta.json");
             my $json = decode_json($str);
 
-            $FIRST = 1;
-            walk \&process, $json;
-
             # NOTE Start meta.json iteration here
             # my $random_number = int(rand(UPPERLIMIT)) + LOWERLIMIT;
             # Set new startTime: Between 5 days and 1 day before now
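Note: with the walk/process call gone, meta.json is only decoded and its startTime adjusted further down. A hedged sketch of what that step amounts to, following the commented rand() hint in the context lines; the startTime field name, the 5-day/1-day bounds and the write-back are assumptions taken from those comments:

# Sketch (assumptions: meta.json lives at "$src/meta.json" as in the context
# lines above and carries a startTime field that gets rewritten).
use strict;
use warnings;
use File::Slurp;
use Cpanel::JSON::XS qw( decode_json encode_json );
use Time::Piece;

my $src = './data/job-archive';
my $localtime = localtime;
my $epochtime = $localtime->epoch;

my $str  = read_file("$src/meta.json");
my $json = decode_json($str);

# "Set new startTime: Between 5 days and 1 day before now", following the
# commented hint int(rand(UPPERLIMIT)) + LOWERLIMIT.
my $lower = $epochtime - 5 * 86400;
my $upper = $epochtime - 1 * 86400;
$json->{startTime} = $lower + int(rand($upper - $lower));

write_file("$src/meta.json", encode_json($json));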
@@ -148,6 +91,22 @@ foreach my $cluster ( @Clusters ) {
 print "Done for job-archive\n";
 sleep(2);
 
+## CHECKPOINTS
+chomp(my $checkpointStart=`date --date 'TZ="Europe/Berlin" 0:00 7 days ago' +%s`);
+my $halfday = 43200;
+my $targetDirCheckpoints = './data/cc-metric-store_new';
+my $srcCheckpoints = './data/cc-metric-store';
+my @ClustersCheckpoints;
+
+# Get clusters by folder
+opendir my $dhc, $srcCheckpoints or die "can't open directory: $!";
+while ( readdir $dhc ) {
+    chomp; next if $_ eq '.' or $_ eq '..' or $_ eq 'job-archive';
+
+    my $cluster = $_;
+    push @ClustersCheckpoints, $cluster;
+}
+
 # start for checkpoints
 foreach my $cluster ( @ClustersCheckpoints ) {
     print "Starting to update startTime in checkpoint-files for $cluster\n";
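Note: the relocated checkpoint block shells out to GNU date for its reference timestamp. A sketch of what the two moved values mean; their use in the checkpoint loop lies outside this diff and is therefore assumed:

# Sketch: $checkpointStart is the Unix epoch of 0:00 Europe/Berlin seven days
# ago, taken from GNU date; how $halfday (43200 s = 12 h) is used further down
# is not shown in this diff, so the window arithmetic below is an assumption.
use strict;
use warnings;

chomp(my $checkpointStart = `date --date 'TZ="Europe/Berlin" 0:00 7 days ago' +%s`);
my $halfday = 43200;

# e.g. boundaries of the first three half-day steps after the start:
my @bounds = map { $checkpointStart + $_ * $halfday } 0 .. 2;
print join(', ', @bounds), "\n";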