Adapt Dockerfiles to a generic architecture (detect arch via `uname -m` instead of hard-coding aarch64)

This commit is contained in:
Aditya Ujeniya 2024-10-10 22:09:01 +02:00
parent 523a688bed
commit e52321247a
7 changed files with 76 additions and 70 deletions

View File

@ -81,7 +81,7 @@ services:
# - SYS_NICE
slurm-controller:
image: cc-docker:22.05
image: clustercockpit:22.05.6
container_name: slurmctld
hostname: slurmctld
build:

View File

@ -8,28 +8,22 @@ if [ ! -d cc-backend ]; then
else
cd cc-backend
if [ ! -d var ]; then
mkdir var
touch var/job.db
wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/job-archive-demo.tar
tar xf job-archive-demo.tar
rm ./job-archive-demo.tar
make
./cc-backend -migrate-db
./cc-backend --init-db --add-user demo:admin:AdminDev
cd ..
else
echo "'cc-backend/var' exists. Cautiously exiting."
echo -n "Stopped."
exit
cd ..
# echo "'cc-backend/var' exists. Cautiously exiting."
# echo -n "Stopped."
# exit
fi
fi
# Download unedited job-archive to ./data/job-archive-source
if [ ! -d data/job-archive-source ]; then
wget https://hpc-mover.rrze.uni-erlangen.de/HPC-Data/0x7b58aefb/eig7ahyo6fo2bais0ephuf2aitohv1ai/job-archive-demo.tar
tar xf job-archive-demo.tar
# mv ./var/job-archive ./job-archive-source
# mv -f ./var/job.db ./cc-backend/var/
# rm -rf ./var
rm ./job-archive-demo.tar
cd ..
else
echo "'data/job-archive-source' already exists!"
fi
ls
# Download unedited checkpoint files to ./data/cc-metric-store-source/checkpoints
if [ ! -d data/cc-metric-store-source ]; then
@ -53,7 +47,7 @@ fi
# cleanup sources
# rm -r ./data/job-archive-source
rm -r ./data/cc-metric-store-source
# rm -r ./data/cc-metric-store-source
# prepare folders for influxdb2
if [ ! -d data/influxdb ]; then
@ -72,12 +66,17 @@ if [ ! -f docker-compose.yml ]; then
cp templates/docker-compose.yml.default ./docker-compose.yml
fi
docker-compose down
cd slurm/base/
make
cd ../..
docker-compose build
./cc-backend/cc-backend --init-db --add-user demo:admin:AdminDev
docker-compose up -d
# echo ""
# echo "Setup complete, containers are up by default: Shut down with 'docker-compose down'."
# echo "Use './cc-backend/cc-backend' to start cc-backend."
# echo "Use scripts in /scripts to load data into influx or mariadb."
# # ./cc-backend/cc-backend
echo ""
echo "Setup complete, containers are up by default: Shut down with 'docker-compose down'."
echo "Use './cc-backend/cc-backend' to start cc-backend."
echo "Use scripts in /scripts to load data into influx or mariadb."
# ./cc-backend/cc-backend

View File

@ -2,9 +2,8 @@ FROM rockylinux:8
LABEL org.opencontainers.image.authors="jan.eitzinger@fau.de"
ENV SLURM_VERSION=22.05.6
ENV ARCH=aarch64
RUN yum install https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm -y
RUN yum install -y https://dl.fedoraproject.org/pub/epel/epel-release-latest-8.noarch.rpm
RUN groupadd -g 981 munge \
&& useradd -m -c "MUNGE Uid 'N' Gid Emporium" -d /var/lib/munge -u 981 -g munge -s /sbin/nologin munge \
@ -13,29 +12,25 @@ RUN groupadd -g 981 munge \
&& groupadd -g 1000 worker \
&& useradd -m -c "Workflow user" -d /home/worker -u 1000 -g worker -s /bin/bash worker
RUN yum install -y munge munge-libs
RUN dnf --enablerepo=powertools install munge-devel -y
RUN yum install rng-tools -y
RUN yum install -y python3 gcc openssl openssl-devel \
RUN yum install -y munge munge-libs rng-tools \
python3 gcc openssl openssl-devel \
openssh-server openssh-clients dbus-devel \
pam-devel numactl numactl-devel hwloc sudo \
lua readline-devel ncurses-devel man2html \
libibmad libibumad rpm-build perl-ExtUtils-MakeMaker.noarch rpm-build make wget
RUN dnf --enablerepo=powertools install rrdtool-devel lua-devel hwloc-devel rpm-build -y
RUN dnf install mariadb-server mariadb-devel -y
RUN mkdir /usr/local/slurm-tmp
RUN cd /usr/local/slurm-tmp
RUN wget https://download.schedmd.com/slurm/slurm-${SLURM_VERSION}.tar.bz2
RUN rpmbuild -ta slurm-${SLURM_VERSION}.tar.bz2
RUN dnf --enablerepo=powertools install -y munge-devel rrdtool-devel lua-devel hwloc-devel mariadb-server mariadb-devel
WORKDIR /root/rpmbuild/RPMS/${ARCH}
RUN yum -y --nogpgcheck localinstall \
slurm-${SLURM_VERSION}-1.el8.${ARCH}.rpm \
slurm-perlapi-${SLURM_VERSION}-1.el8.${ARCH}.rpm \
slurm-slurmctld-${SLURM_VERSION}-1.el8.${ARCH}.rpm
WORKDIR /
RUN mkdir -p /usr/local/slurm-tmp \
&& cd /usr/local/slurm-tmp \
&& wget https://download.schedmd.com/slurm/slurm-${SLURM_VERSION}.tar.bz2 \
&& rpmbuild -ta slurm-${SLURM_VERSION}.tar.bz2
RUN ARCH=$(uname -m) \
&& yum -y --nogpgcheck localinstall \
/root/rpmbuild/RPMS/$ARCH/slurm-${SLURM_VERSION}-1.el8.$ARCH.rpm \
/root/rpmbuild/RPMS/$ARCH/slurm-perlapi-${SLURM_VERSION}-1.el8.$ARCH.rpm \
/root/rpmbuild/RPMS/$ARCH/slurm-slurmctld-${SLURM_VERSION}-1.el8.$ARCH.rpm
VOLUME ["/home", "/.secret"]
# 22: SSH

View File

@ -1,6 +1,9 @@
#!/usr/bin/env bash
set -e
# Determine the system architecture dynamically
ARCH=$(uname -m)
# start sshd server
_sshd_host() {
if [ ! -d /var/run/sshd ]; then
@ -70,12 +73,12 @@ _copy_secrets() {
# run slurmctld
_slurmctld() {
cd /root/rpmbuild/RPMS/aarch64
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.aarch64.rpm \
slurm-perlapi-22.05.6-1.el8.aarch64.rpm \
slurm-slurmd-22.05.6-1.el8.aarch64.rpm \
slurm-torque-22.05.6-1.el8.aarch64.rpm \
slurm-slurmctld-22.05.6-1.el8.aarch64.rpm
cd /root/rpmbuild/RPMS/$ARCH
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.$ARCH.rpm \
slurm-perlapi-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmd-22.05.6-1.el8.$ARCH.rpm \
slurm-torque-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmctld-22.05.6-1.el8.$ARCH.rpm
echo "checking for slurmdbd.conf"
while [ ! -f /.secret/slurmdbd.conf ]; do
echo -n "."

View File

@ -1,6 +1,9 @@
#!/usr/bin/env bash
set -e
# Determine the system architecture dynamically
ARCH=$(uname -m)
SLURM_ACCT_DB_SQL=/slurm_acct_db.sql
# start sshd server
@ -48,10 +51,10 @@ _wait_for_worker() {
# run slurmdbd
_slurmdbd() {
cd /root/rpmbuild/RPMS/aarch64
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.aarch64.rpm \
slurm-perlapi-22.05.6-1.el8.aarch64.rpm \
slurm-slurmdbd-22.05.6-1.el8.aarch64.rpm
cd /root/rpmbuild/RPMS/$ARCH
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.$ARCH.rpm \
slurm-perlapi-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmdbd-22.05.6-1.el8.$ARCH.rpm
mkdir -p /var/spool/slurm/d /var/log/slurm /etc/slurm
chown slurm: /var/spool/slurm/d /var/log/slurm
if [[ ! -f /home/config/slurmdbd.conf ]]; then

View File

@ -1,6 +1,9 @@
#!/usr/bin/env bash
set -e
# Determine the system architecture dynamically
ARCH=$(uname -m)
# start sshd server
_sshd_host() {
if [ ! -d /var/run/sshd ]; then
@ -68,13 +71,13 @@ _copy_secrets() {
# run slurmctld
_slurmctld() {
cd /root/rpmbuild/RPMS/aarch64
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.aarch64.rpm \
slurm-perlapi-22.05.6-1.el8.aarch64.rpm \
slurm-slurmd-22.05.6-1.el8.aarch64.rpm \
slurm-torque-22.05.6-1.el8.aarch64.rpm \
slurm-slurmctld-22.05.6-1.el8.aarch64.rpm \
slurm-slurmrestd-22.05.6-1.el8.aarch64.rpm
cd /root/rpmbuild/RPMS/$ARCH
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.$ARCH.rpm \
slurm-perlapi-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmd-22.05.6-1.el8.$ARCH.rpm \
slurm-torque-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmctld-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmrestd-22.05.6-1.el8.$ARCH.rpm
echo -n "checking for slurmdbd.conf"
while [ ! -f /.secret/slurmdbd.conf ]; do
echo -n "."

View File

@ -1,6 +1,9 @@
#!/usr/bin/env bash
set -e
# Determine the system architecture dynamically
ARCH=$(uname -m)
# start sshd server
_sshd_host() {
if [ ! -d /var/run/sshd ]; then
@ -50,11 +53,11 @@ _start_dbus() {
# run slurmd
_slurmd() {
cd /root/rpmbuild/RPMS/aarch64
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.aarch64.rpm \
slurm-perlapi-22.05.6-1.el8.aarch64.rpm \
slurm-slurmd-22.05.6-1.el8.aarch64.rpm \
slurm-torque-22.05.6-1.el8.aarch64.rpm
cd /root/rpmbuild/RPMS/$ARCH
yum -y --nogpgcheck localinstall slurm-22.05.6-1.el8.$ARCH.rpm \
slurm-perlapi-22.05.6-1.el8.$ARCH.rpm \
slurm-slurmd-22.05.6-1.el8.$ARCH.rpm \
slurm-torque-22.05.6-1.el8.$ARCH.rpm
if [ ! -f /.secret/slurm.conf ]; then
echo -n "checking for slurm.conf"
while [ ! -f /.secret/slurm.conf ]; do