#!/usr/bin/env bash
#
# SLURM array job: runs haga-lambda once per (dataset, experiment, run) triple.
# Each array task checks out the branch "<dataset>_<variant>" and executes the
# project via nix + stack on its assigned node.
#
# test this timing, it scales with result sizes
#SBATCH --time=12:00:00
#SBATCH --partition=cpu
# 30 Runs * 9 Experiments * 3 Datasets = 810 tasks
#SBATCH --array=0-809
# ensure ./output exists, is a folder and is writable in your working directory
#SBATCH --output=./output/output_run_%a.txt
#SBATCH --error=./output/error_run_%a.txt
# exclude nodes with weaker CPUs
#SBATCH --exclude=oc222
# test memory usage, it scales **Exponentially** with max Depth. Implement some
# countermeasures if that's a problem, e.g. raise max depth over time.
#SBATCH --mem=6G
#SBATCH --nodes=1

set -euo pipefail

# Fail fast with a clear message when run outside of sbatch.
: "${SLURM_ARRAY_TASK_ID:?must be set — submit this script with sbatch}"

# list your branches (one dataset per entry; branch names are "<dataset>_<variant>")
problems=("iris" "nurse" "german")

# 270 tasks per dataset (30 runs * 9 experiments) → integer division picks the dataset
current_problem=${problems[SLURM_ARRAY_TASK_ID / 270]}

# Within a dataset: 30 consecutive task IDs per experiment, variants numbered 1-9.
current_variant=$(( (SLURM_ARRAY_TASK_ID / 30) % 9 + 1 ))
current_branch="${current_problem}_${current_variant}"

# ensure [full path to writable folder on node *] exists
# NOTE(review): with --nodes=1 these clones all happen on the batch host; the
# per-node list only matters if --nodes is raised.
git clone -b "$current_branch" --single-branch "[your git repo]" "[full path to writable folder on node 1]/$current_branch"
git clone -b "$current_branch" --single-branch "[your git repo]" "[full path to writable folder on node 2]/$current_branch"
#... for every node

# \$SLURMD_NODENAME is escaped so it expands on the node that executes the task,
# not on the batch host; && ensures stack never runs from the wrong directory.
srun bash -c "cd /data/\$SLURMD_NODENAME/merljoha/$current_branch && nix develop --command stack --no-nix --system-ghc --no-install-ghc run haga-lambda"