Mirror of https://github.com/ClusterCockpit/cc-docker.git
Merge pull request #3 from ClusterCockpit/dev

Check for all installed modules before starting docker services.

Commit dbf66015b7
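
The commit adds a preflight step to the deployment flow: containers are only brought up once every required tool and Perl module has been found. A minimal sketch of that flow, run from the repository root (the compose invocation is illustrative and not taken from this diff):

chmod u+x scripts/checkModules.sh
./scripts/checkModules.sh
docker-compose up -d   # illustrative; only sensible once the checks above pass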
@@ -21,24 +21,6 @@ services:
     depends_on:
       - nats
 
-  influxdb:
-    container_name: influxdb
-    image: influxdb:latest
-    command: ["--reporting-disabled", "--log-level=debug"]
-    environment:
-      DOCKER_INFLUXDB_INIT_MODE: setup
-      DOCKER_INFLUXDB_INIT_USERNAME: devel
-      DOCKER_INFLUXDB_INIT_PASSWORD: ${INFLUXDB_PASSWORD}
-      DOCKER_INFLUXDB_INIT_ORG: ${INFLUXDB_ORG}
-      DOCKER_INFLUXDB_INIT_BUCKET: ${INFLUXDB_BUCKET}
-      DOCKER_INFLUXDB_INIT_RETENTION: 100w
-      DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: ${INFLUXDB_ADMIN_TOKEN}
-    ports:
-      - "0.0.0.0:8086:8086"
-    volumes:
-      - ${DATADIR}/influxdb/data:/var/lib/influxdb2
-      - ${DATADIR}/influxdb/config:/etc/influxdb2
-
   openldap:
     container_name: ldap
     image: osixia/openldap:1.5.0
@@ -144,3 +126,21 @@ services:
       - /etc/localtime:/etc/localtime:ro
     ports:
       - "6820:6820"
+
+  # influxdb:
+  #   container_name: influxdb
+  #   image: influxdb:latest
+  #   command: ["--reporting-disabled", "--log-level=debug"]
+  #   environment:
+  #     DOCKER_INFLUXDB_INIT_MODE: setup
+  #     DOCKER_INFLUXDB_INIT_USERNAME: devel
+  #     DOCKER_INFLUXDB_INIT_PASSWORD: ${INFLUXDB_PASSWORD}
+  #     DOCKER_INFLUXDB_INIT_ORG: ${INFLUXDB_ORG}
+  #     DOCKER_INFLUXDB_INIT_BUCKET: ${INFLUXDB_BUCKET}
+  #     DOCKER_INFLUXDB_INIT_RETENTION: 100w
+  #     DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: ${INFLUXDB_ADMIN_TOKEN}
+  #   ports:
+  #     - "0.0.0.0:8086:8086"
+  #   volumes:
+  #     - ${DATADIR}/influxdb/data:/var/lib/influxdb2
+  #     - ${DATADIR}/influxdb/config:/etc/influxdb2
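
The commented-out influxdb service reads its credentials and paths from environment variables. A hypothetical .env sketch with placeholder values (the variable names come from the compose snippet above; the values are made up):

# .env (placeholder values, not from the repository)
DATADIR=./data
INFLUXDB_PASSWORD=change-me
INFLUXDB_ORG=clustercockpit
INFLUXDB_BUCKET=mydb
INFLUXDB_ADMIN_TOKEN=change-me-too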
scripts/checkModules.sh (new executable file, 68 lines)

#!/bin/bash

cd scripts

# Check if required perl modules are installed
if ./checkPerlModules.pl ../migrateTimestamps.pl | grep "couldn't load"; then
    echo "Perl Modules missing!"
    echo -n "Stopped."
    exit
else
    echo "Perl Modules loaded."
fi

# check if golang is installed and available
if ! go version; then
    echo "Golang not installed!"
    echo -n "Stopped."
    exit
else
    echo "Golang installed."
fi

# check if docker is installed and available
if ! docker --version; then
    echo "Docker not installed!"
    echo -n "Stopped."
    exit
else
    echo "Docker installed."
fi

# check if docker-compose is installed and available
if ! docker-compose --version; then
    echo "Docker-compose not installed!"
    echo -n "Stopped."
    exit
else
    echo "Docker-compose installed."
fi

# check if npm is installed and available
if ! npm --version; then
    echo "NPM not installed!"
    echo -n "Stopped."
    exit
else
    echo "NPM installed."
fi

# check if make is installed and available
if ! make --version; then
    echo "Make not installed!"
    echo -n "Stopped."
    exit
else
    echo "Make installed."
fi

# check if gcc is installed and available
if ! gcc --version; then
    echo "GCC not installed!"
    echo -n "Stopped."
    exit
else
    echo "GCC installed."
fi

cd ..
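
checkModules.sh spells out one if/else block per tool. As an aside, the same checks can be written more compactly with command -v and a nonzero exit status; the following is a hypothetical sketch, not part of the commit:

#!/bin/bash
# Hypothetical compact variant of the tool checks above (not in the commit).
missing=0
for tool in go docker docker-compose npm make gcc; do
    if command -v "$tool" >/dev/null 2>&1; then
        echo "$tool installed."
    else
        echo "$tool not installed!"
        missing=1
    fi
done
exit "$missing"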
scripts/checkPerlModules.pl (new executable file, 45 lines)

#!/usr/bin/env perl
use warnings;
use strict;

my $filename =
    shift || &help;    # command line argument is perl script to evaluate
my @modules;           # array of 'use' statements from code we are checking

open( IN, $filename ) or die "couldn't open $filename for processing: $!\n";

while (<IN>) {
    chomp;
    if ( (/^use/) and not( /strict/ || /warnings/ ) ) {
        push @modules, $_;
    }
}
close IN;
for my $code (@modules) {
    my ( undef, $library ) = split( / /, $code );    # get the module name
    $library =~ s/;//;                               # clean up the name
    eval $code;
    if ($@) {
        warn "couldn't load $library: $@", "\n";
    } else {
        print "$library looks ok\n";
    }
}

sub help {
    print <<"END";

checkPerlModules.pl

This script finds all the "use" statements loading modules in the target perl
file (specified as a command line argument) and attempts to load them.
If there are problems loading the module, the error message returned is printed.

END
    exit;
}
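
Run against a target script, checkPerlModules.pl prints one line per use statement it tries to load (strict and warnings are skipped); checkModules.sh greps this output for "couldn't load". An illustrative invocation from inside scripts/ (module names and error text are examples, not actual output):

$ ./checkPerlModules.pl ../migrateTimestamps.pl
File::Slurp looks ok
couldn't load DateTime: Can't locate DateTime.pm in @INC ...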
scripts/checkpointsToInflux.sh (new executable file, 65 lines)

#!/bin/bash

set -euo pipefail

NEW_CHECKPOINTS='../data/cc-metric-store/checkpoints'
VERBOSE=1
INFLUX_HOST='http://0.0.0.0:8181'

HEADERS=(
    -H "Content-Type: text/plain; charset=utf-8"
    -H "Accept: application/json"
)

checkp_clusters=()
while IFS= read -r -d '' dir; do
    checkp_clusters+=("$(basename "$dir")")
done < <(find "$NEW_CHECKPOINTS" -mindepth 1 -maxdepth 1 -type d \! -name 'job-archive' -print0)

for cluster in "${checkp_clusters[@]}"; do
    echo "Starting to read updated checkpoint-files into influx for $cluster"

    while IFS= read -r -d '' level1_dir; do
        level1=$(basename "$level1_dir")
        node_source="$NEW_CHECKPOINTS/$cluster/$level1"

        mapfile -t files < <(find "$node_source" -type f -name '*.json' | sort -V)
        # if [[ ${#files[@]} -ne 14 ]]; then
        #     continue
        # fi

        node_measurement=""
        for file in "${files[@]}"; do
            rawstr=$(<"$file")

            while IFS= read -r metric; do
                start=$(jq -r ".metrics[\"$metric\"].start" <<<"$rawstr")
                timestep=$(jq -r ".metrics[\"$metric\"].frequency" <<<"$rawstr")

                while IFS= read -r index_value; do
                    index=$(awk -F: '{print $1}' <<<"$index_value")
                    value=$(awk -F: '{print $2}' <<<"$index_value")

                    if [[ -n "$value" && "$value" != "null" ]]; then
                        timestamp=$((start + (timestep * index)))
                        node_measurement+="$metric,cluster=$cluster,hostname=$level1,type=node value=$value $timestamp\n"
                    fi
                done < <(jq -r ".metrics[\"$metric\"].data | to_entries | map(\"\(.key):\(.value // \"null\")\") | .[]" <<<"$rawstr")
            done < <(jq -r '.metrics | keys[]' <<<"$rawstr")
        done

        if [[ -n "$node_measurement" ]]; then
            while IFS= read -r chunk; do
                response_code=$(curl -s -o /dev/null -w "%{http_code}" "${HEADERS[@]}" --data-binary "$chunk" "$INFLUX_HOST/api/v2/write?bucket=mydb&precision=s")
                if [[ "$response_code" == "204" ]]; then
                    [[ "$VERBOSE" -eq 1 ]] && echo "INFLUX API WRITE: CLUSTER $cluster HOST $level1"
                elif [[ "$response_code" != "422" ]]; then
                    echo "INFLUX API WRITE ERROR CODE $response_code"
                fi
            done < <(echo -e "$node_measurement" | split -l 1000 --filter='cat')
        fi
        echo "Done for: $node_source"
    done < <(find "$NEW_CHECKPOINTS/$cluster" -mindepth 1 -maxdepth 1 -type d -print0)
done

echo "Done for influx"
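
Each record appended to node_measurement is InfluxDB line protocol with second-precision timestamps, flushed through the v2 write API. One illustrative record and the corresponding request (metric name, cluster, hostname, value, and timestamp are made up; the host, headers, and bucket come from the script above):

curl -s -o /dev/null -w "%{http_code}" \
  -H "Content-Type: text/plain; charset=utf-8" \
  -H "Accept: application/json" \
  --data-binary "cpu_load,cluster=emmy,hostname=node001,type=node value=1.25 1700000000" \
  "http://0.0.0.0:8181/api/v2/write?bucket=mydb&precision=s"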
@@ -1,4 +1,5 @@
 #!/bin/bash
+set -eu
 echo ""
 echo "|--------------------------------------------------------------------------------------|"
 echo "| Welcome to cc-docker automatic deployment script. |"
@@ -23,6 +24,9 @@ if [ ! -d cc-backend ]; then
     exit
 fi
 
+chmod u+x scripts/checkModules.sh
+./scripts/checkModules.sh
+
 # Creates data directory if it does not exist.
 # Contains all the mount points required by all the docker services
 # and their static files.