diff --git a/docker-compose.yml b/docker-compose.yml
index 59f5891..f93b317 100755
--- a/docker-compose.yml
+++ b/docker-compose.yml
@@ -21,24 +21,6 @@ services:
     depends_on:
       - nats
 
-  influxdb:
-    container_name: influxdb
-    image: influxdb:latest
-    command: ["--reporting-disabled", "--log-level=debug"]
-    environment:
-      DOCKER_INFLUXDB_INIT_MODE: setup
-      DOCKER_INFLUXDB_INIT_USERNAME: devel
-      DOCKER_INFLUXDB_INIT_PASSWORD: ${INFLUXDB_PASSWORD}
-      DOCKER_INFLUXDB_INIT_ORG: ${INFLUXDB_ORG}
-      DOCKER_INFLUXDB_INIT_BUCKET: ${INFLUXDB_BUCKET}
-      DOCKER_INFLUXDB_INIT_RETENTION: 100w
-      DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: ${INFLUXDB_ADMIN_TOKEN}
-    ports:
-      - "0.0.0.0:8086:8086"
-    volumes:
-      - ${DATADIR}/influxdb/data:/var/lib/influxdb2
-      - ${DATADIR}/influxdb/config:/etc/influxdb2
-
   openldap:
     container_name: ldap
     image: osixia/openldap:1.5.0
@@ -143,4 +125,22 @@ services:
       - /etc/timezone:/etc/timezone:ro
       - /etc/localtime:/etc/localtime:ro
     ports:
-      - "6820:6820"
\ No newline at end of file
+      - "6820:6820"
+
+  # influxdb:
+  #   container_name: influxdb
+  #   image: influxdb:latest
+  #   command: ["--reporting-disabled", "--log-level=debug"]
+  #   environment:
+  #     DOCKER_INFLUXDB_INIT_MODE: setup
+  #     DOCKER_INFLUXDB_INIT_USERNAME: devel
+  #     DOCKER_INFLUXDB_INIT_PASSWORD: ${INFLUXDB_PASSWORD}
+  #     DOCKER_INFLUXDB_INIT_ORG: ${INFLUXDB_ORG}
+  #     DOCKER_INFLUXDB_INIT_BUCKET: ${INFLUXDB_BUCKET}
+  #     DOCKER_INFLUXDB_INIT_RETENTION: 100w
+  #     DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: ${INFLUXDB_ADMIN_TOKEN}
+  #   ports:
+  #     - "0.0.0.0:8086:8086"
+  #   volumes:
+  #     - ${DATADIR}/influxdb/data:/var/lib/influxdb2
+  #     - ${DATADIR}/influxdb/config:/etc/influxdb2
\ No newline at end of file
diff --git a/scripts/checkModules.sh b/scripts/checkModules.sh
new file mode 100755
index 0000000..6fdd04d
--- /dev/null
+++ b/scripts/checkModules.sh
@@ -0,0 +1,68 @@
+#!/bin/bash
+
+cd scripts
+
+# Check if required perl modules are installed
+if ./checkPerlModules.pl ../migrateTimestamps.pl | grep "couldn't load"; then
+    echo "Perl Modules missing!"
+    echo -n "Stopped."
+    exit
+else
+    echo "Perl Modules loaded."
+fi
+
+# check if golang is installed and available
+if ! go version; then
+    echo "Golang not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "Golang installed."
+fi
+
+# check if docker is installed and available
+if ! docker --version; then
+    echo "Docker not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "Docker installed."
+fi
+
+# check if docker-compose is installed and available
+if ! docker-compose --version; then
+    echo "Docker-compose not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "Docker-compose installed."
+fi
+
+# check if npm is installed and available
+if ! npm --version; then
+    echo "NPM not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "NPM installed."
+fi
+
+# check if make is installed and available
+if ! make --version; then
+    echo "Make not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "Make installed."
+fi
+
+# check if gcc is installed and available
+if ! gcc --version; then
+    echo "GCC not installed!"
+    echo -n "Stopped."
+    exit
+else
+    echo "GCC installed."
+fi
+
+cd ..
diff --git a/scripts/checkPerlModules.pl b/scripts/checkPerlModules.pl
new file mode 100755
index 0000000..9735ae8
--- /dev/null
+++ b/scripts/checkPerlModules.pl
@@ -0,0 +1,42 @@
+#!/usr/bin/env perl
+use warnings;
+use strict;
+
+my $filename =
+    shift || &help;    # command line argument is perl script to evaluate
+my @modules;           # array of 'use' statements from code we are checking
+
+open( IN, $filename ) or die "couldn't open $filename for processing: $!\n";
+
+while (<IN>) {
+    chomp;
+    if ( (/^use/) and not( /strict/ || /warnings/ ) ) {
+        push @modules, $_;
+    }
+}
+close IN;
+for my $code (@modules) {
+    my ( undef, $library ) = split( / /, $code );    # get the module name
+    $library =~ s/;//;                               # clean up the name
+    eval $code;
+    if ($@) {
+        warn "couldn't load $library: $@", "\n";
+    } else {
+        print "$library looks ok\n";
+    }
+}
+
+sub help
+{
+    print <<"END";
+
+checkPerlModules.pl
+
+This script finds all the "use" statements loading modules in the target perl
+file (specified as a command line argument) and attempts to load them.
+If there are problems loading the module, the error message returned is printed.
+
+END
+    exit;
+}
+
diff --git a/scripts/checkpointsToInflux.sh b/scripts/checkpointsToInflux.sh
new file mode 100755
index 0000000..4097163
--- /dev/null
+++ b/scripts/checkpointsToInflux.sh
@@ -0,0 +1,65 @@
+#!/bin/bash
+
+set -euo pipefail
+
+NEW_CHECKPOINTS='../data/cc-metric-store/checkpoints'
+VERBOSE=1
+INFLUX_HOST='http://0.0.0.0:8181'
+
+HEADERS=(
+    -H "Content-Type: text/plain; charset=utf-8"
+    -H "Accept: application/json"
+)
+
+checkp_clusters=()
+while IFS= read -r -d '' dir; do
+    checkp_clusters+=("$(basename "$dir")")
+done < <(find "$NEW_CHECKPOINTS" -mindepth 1 -maxdepth 1 -type d \! -name 'job-archive' -print0)
+
+for cluster in "${checkp_clusters[@]}"; do
+    echo "Starting to read updated checkpoint-files into influx for $cluster"
+
+    while IFS= read -r -d '' level1_dir; do
+        level1=$(basename "$level1_dir")
+        node_source="$NEW_CHECKPOINTS/$cluster/$level1"
+
+        mapfile -t files < <(find "$node_source" -type f -name '*.json' | sort -V)
+        # if [[ ${#files[@]} -ne 14 ]]; then
+        #     continue
+        # fi
+
+        node_measurement=""
+        for file in "${files[@]}"; do
+            rawstr=$(<"$file")
+
+            while IFS= read -r metric; do
+                start=$(jq -r ".metrics[\"$metric\"].start" <<<"$rawstr")
+                timestep=$(jq -r ".metrics[\"$metric\"].frequency" <<<"$rawstr")
+
+                while IFS= read -r index_value; do
+                    index=$(awk -F: '{print $1}' <<<"$index_value")
+                    value=$(awk -F: '{print $2}' <<<"$index_value")
+
+                    if [[ -n "$value" && "$value" != "null" ]]; then
+                        timestamp=$((start + (timestep * index)))
+                        node_measurement+="$metric,cluster=$cluster,hostname=$level1,type=node value=$value $timestamp\n"
+                    fi
+                done < <(jq -r ".metrics[\"$metric\"].data | to_entries | map(\"\(.key):\(.value // \"null\")\") | .[]" <<<"$rawstr")
+            done < <(jq -r '.metrics | keys[]' <<<"$rawstr")
+        done
+
+        if [[ -n "$node_measurement" ]]; then
+            while IFS= read -r chunk; do
+                response_code=$(curl -s -o /dev/null -w "%{http_code}" "${HEADERS[@]}" --data-binary "$chunk" "$INFLUX_HOST/api/v2/write?bucket=mydb&precision=s")
+                if [[ "$response_code" == "204" ]]; then
+                    [[ "$VERBOSE" -eq 1 ]] && echo "INFLUX API WRITE: CLUSTER $cluster HOST $level1"
+                elif [[ "$response_code" != "422" ]]; then
+                    echo "INFLUX API WRITE ERROR CODE $response_code"
+                fi
+            done < <(echo -e "$node_measurement" | split -l 1000 --filter='cat')
+        fi
+        echo "Done for : "$node_source
+    done < <(find "$NEW_CHECKPOINTS/$cluster" -mindepth 1 -maxdepth 1 -type d -print0)
+done
+
+echo "Done for influx"
diff --git a/setupDev.sh b/setupDev.sh
index 2549181..7c61558 100755
--- a/setupDev.sh
+++ b/setupDev.sh
@@ -1,5 +1,6 @@
 #!/bin/bash
+set -eu
 echo ""
 echo "|--------------------------------------------------------------------------------------|"
 echo "| Welcome to cc-docker automatic deployment script.                                    |"
 echo "|--------------------------------------------------------------------------------------|"
@@ -18,16 +19,19 @@ echo ""
 
 # Check cc-backend if exists
 if [ ! -d cc-backend ]; then
-  echo "'cc-backend' not yet prepared! Please clone cc-backend repository before starting this script."
-  echo -n "Stopped."
-  exit
+    echo "'cc-backend' not yet prepared! Please clone cc-backend repository before starting this script."
+    echo -n "Stopped."
+    exit
 fi
 
+chmod u+x scripts/checkModules.sh
+./scripts/checkModules.sh
+
 # Creates data directory if it does not exists.
 # Contains all the mount points required by all the docker services
 # and their static files.
 if [ ! -d data ]; then
-  mkdir -m777 data
+    mkdir -m777 data
 fi
 
 # Invokes the dataGenerationScript.sh, which then populates the required
@@ -41,12 +45,12 @@ perl ./migrateTimestamps.pl
 
 # Create archive folder for rewritten ccms checkpoints
 if [ ! -d data/cc-metric-store/archive ]; then
-  mkdir -p data/cc-metric-store/archive
+    mkdir -p data/cc-metric-store/archive
 fi
 
 # cleanup sources
 if [ -d data/cc-metric-store-source ]; then
-  rm -r data/cc-metric-store-source
+    rm -r data/cc-metric-store-source
 fi
 
 # Just in case user forgot manually shutdown the docker services.
@@ -66,21 +70,21 @@ docker-compose up -d
 
 cd cc-backend
 
 if [ ! -d var ]; then
-  wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/job-archive-demo.tar
-  tar xf job-archive-demo.tar
-  rm ./job-archive-demo.tar
+    wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/job-archive-demo.tar
+    tar xf job-archive-demo.tar
+    rm ./job-archive-demo.tar
 
-  cp ./configs/env-template.txt .env
-  cp -f ../misc/config.json config.json
+    cp ./configs/env-template.txt .env
+    cp -f ../misc/config.json config.json
 
-  make
+    make
 
-  ./cc-backend -migrate-db
-  ./cc-backend --init-db --add-user demo:admin:demo
-  cd ..
+    ./cc-backend -migrate-db
+    ./cc-backend --init-db --add-user demo:admin:demo
+    cd ..
 else
-  cd ..
-  echo "'cc-backend/var' exists. Cautiously exiting."
+    cd ..
+    echo "'cc-backend/var' exists. Cautiously exiting."
 fi
 
 echo ""