Compare commits

17 commits:
e3e961262c, bb755c5495, 45bceb9deb, 33efb0757d, a4cdfd032f, ddbd2e67f2, 63edd8f4eb, 8a20109c57, 34b4ac0ffc, 7eb4208ee5, 86c1e4616b, 9d1c93ac94, 892f649c53, e397cad784, c6de876e2d, 155bc888bf, 137aaf81f4
OnRunning.md (new file, 27 lines)
@@ -0,0 +1,27 @@

# Running Experiments with Lambda

This is not supposed to be an instruction on how to do it properly, but a write-up of how I did it.

If you want to do it properly, extend the command-line arguments of haga-lambda to allow runtime tweaking of hyperparameters and datasets. While at it, generalizing LamdaCalculusV1 would be smart, too. You can use LamdaCalculusV2 as a template for how to do it more properly. (I wrote that later and was, IMO, quite a bit smarter about it. I sadly didn't have time to fix up V1...)
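For illustration only, an invocation of such an extended CLI might look like the sketch below (ignoring the nix wrapper used on the cluster). The `--dataset` and `--max-depth` flags are hypothetical and do not exist yet; `--population-size` is the only one of these that haga-lambda already has.

```bash
# hypothetical flags sketching runtime tweaking; only --population-size exists today
stack run haga-lambda -- --dataset german --max-depth 9 --population-size 100
```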
You just want to do the same hack I did, or know about it?

Create a branch for each dataset/experiment pair, e.g. iris_1 ... iris_9.
Here git is your friend, especially when you inevitably screw up.

For example, `echo git\ checkout\ iris_{1..9}\;\ git\ cherry-pick\ 7ced1e1\;` prints a command that applies commit 7ced1e1 to every iris branch.
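The same idea as an explicit loop, in case the quoting above is hard to read (the branch names and the commit hash are just the examples from this note):

```bash
# create one branch per dataset/experiment pair
for i in $(seq 1 9); do
  git branch "iris_$i"
done

# apply commit 7ced1e1 to every iris branch
for branch in iris_{1..9}; do
  git checkout "$branch"
  git cherry-pick 7ced1e1
done
```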
Adapt build.sbatch and run.sbatch and **commit them**!

Clone the branch you committed to onto the cluster.

Create the required folders! If you forget the output one, slurm will fail silently!
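A minimal sketch of this step, with the repo URL left as the same placeholder used in the sbatch files and iris_1 standing in for whichever branch you are setting up:

```bash
# clone one experiment branch onto the cluster ("[your git repo]" is a placeholder)
git clone -b iris_1 --single-branch "[your git repo]" iris_1
cd iris_1

# slurm fails silently if the folder behind --output/--error is missing
mkdir -p output
```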
Make sure to sbatch an adapted **build.sbatch before run.sbatch**!

build.sbatch needs to be adapted for, and run on, every node you plan to use!

Otherwise stuff WILL break!
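Roughly, the order of operations is the following sketch; editing `--nodelist` in build.sbatch for each node is a manual step:

```bash
# for each node you plan to use: point --nodelist in build.sbatch at it, commit, then
sbatch build.sbatch

# wait until every build job has finished (check with squeue), only then
sbatch run.sbatch
```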
sbatch run.sbatch

You can use squeue to monitor progress.
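For example (plain squeue flags, nothing project-specific):

```bash
squeue -u "$USER"                 # all of your pending and running array tasks
squeue -u "$USER" -t RUNNING -l   # long format, running tasks only
```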
A huge slew of raw data will be dumped into the output folder. The error files contain the results; the output files contain stats during training.
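To peek at a single run while jobs are still going, the file names follow the `%a` pattern from run.sbatch (task 0 here is just an example):

```bash
tail -f output/output_run_0.txt   # training stats of array task 0
less output/error_run_0.txt       # results of array task 0
```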
On how to process these results, see: https://merl.dnshome.de/git/Hans/haga-graphics
build.sbatch (29 lines)
@@ -1,9 +1,28 @@
 #!/usr/bin/env bash
 #SBATCH --time=00:10:00
 #SBATCH --partition=cpu
-#SBATCH --output=./output/output_build.txt
-#SBATCH --error=./output/error_build.txt
+# 9 Experiments * 3 Datasets
+#SBATCH --array=0-27
+# ensure output exists, is a folder and is writable in your working directory
+#SBATCH --output=./output/output_run_%a.txt
+#SBATCH --error=./output/error_run_%a.txt
+# run once for every node you plan to use
 #SBATCH --nodelist=oc-compute02
-#SBATCH --mem=4G
-#SBATCH -c16
-srun nix develop --command stack --no-nix --system-ghc --no-install-ghc build
+#SBATCH --mem=2G
+
+
+# list your branches
+problems=("iris" "nurse" "german")
+
+#9 Experiments
+current_problem=${problems[(${SLURM_ARRAY_TASK_ID}/9)]}
+#9 Experiments
+current_variant=$(((${SLURM_ARRAY_TASK_ID}) % 9 + 1))
+current_branch="${current_problem}_${current_variant}"
+
+# ensure [full path to writable folder on node *] exists
+git clone -b $current_branch --single-branch "[your git repo]" [full path to writable folder on node 1]/$current_branch
+git clone -b $current_branch --single-branch "[your git repo]" [full path to writable folder on node 1]/$current_branch
+#... for every node
+
+srun bash -c "cd /data/$SLURMD_NODENAME/merljoha/$current_branch; nix develop --command stack --no-nix --system-ghc --no-install-ghc build"
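As a quick sanity check of the array arithmetic above, here is the same mapping evaluated for one arbitrarily chosen index:

```bash
# SLURM_ARRAY_TASK_ID=13 -> problem index 13/9 = 1, variant 13%9+1 = 5
problems=("iris" "nurse" "german")
id=13
echo "${problems[id / 9]}_$((id % 9 + 1))"   # prints: nurse_5
```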
@@ -38,7 +38,7 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(Int -> Int -> Int))), ["(+)", "(-)", "(*)"]),
 -- Logic
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Bool -> Bool))), ["(&&)", "(||)"]),
--- Ordered Enums
+-- Ordered
 ((Ref.SomeTypeRep (Ref.TypeRep @(Int -> Int -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(AccountStatus -> AccountStatus -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(CreditHistory -> CreditHistory -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
@@ -46,7 +46,7 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(EmploymentStatus -> EmploymentStatus -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(OtherDebtors -> OtherDebtors -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Job -> Job -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
--- Eq Enum
+-- Eq
 ((Ref.SomeTypeRep (Ref.TypeRep @(GermanClass -> GermanClass -> Bool))), ["(==)", "(/=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Purpose -> Purpose -> Bool))), ["(==)", "(/=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(StatusAndSex -> StatusAndSex -> Bool))), ["(==)", "(/=)"]),
@@ -55,7 +55,7 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(Housing -> Housing -> Bool))), ["(==)", "(/=)"]),
 -- Any Type
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Int -> Int -> Int))), ["if'"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> GermanClass -> GermanClass -> GermanClass))), ["if'","if'","if'","if'","if'","if'","if'","if'"]),
+((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> GermanClass -> GermanClass -> GermanClass))), ["if'"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> AccountStatus -> AccountStatus -> AccountStatus))), ["if'"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> CreditHistory -> CreditHistory -> CreditHistory))), ["if'"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Purpose -> Purpose -> Purpose))), ["if'"]),
@@ -86,13 +86,13 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(Job))), [(fmap show (enumUniform UnemployedOrUnskilledNonResident HighlySkilled ))])
 ],
 targetType = (Ref.SomeTypeRep (Ref.TypeRep @(AccountStatus -> Int -> CreditHistory -> Purpose -> Int -> Savings -> EmploymentStatus -> Int -> StatusAndSex -> OtherDebtors -> Int -> Property -> Int -> OtherPlans -> Housing -> Int -> Job -> Int -> Bool -> Bool -> GermanClass))),
-maxDepth = 8,
+maxDepth = 9,
 weights =
 ExpressionWeights
-{ lambdaSpucker = 1,
-lambdaSchlucker = 2,
-symbol = 30,
-variable = 10,
+{ lambdaSpucker = 0,
+lambdaSchlucker = 10,
+symbol = 100,
+variable = 5,
 constant = 5
 }
 }
@@ -34,12 +34,17 @@ lE =
 LambdaEnviroment
 { functions =
 Map.fromList
-[ ((Ref.SomeTypeRep (Ref.TypeRep @(Float -> Float -> Float))), ["(+)", "(-)", "(*)"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(Float -> Float -> Bool))), ["(>)", "(==)", "(>=)"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(IrisClass -> IrisClass -> Bool))), ["(==)"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Float -> Float -> Float))), ["if'","if'","if'","if'","if'","if'","if'","if'"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Bool -> Bool))), ["(&&)", "(||)"]),
-((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> IrisClass -> IrisClass -> IrisClass))), ["if'","if'","if'","if'","if'","if'","if'","if'","if'","if'"])
+[ -- Math
+((Ref.SomeTypeRep (Ref.TypeRep @(Float -> Float -> Float))), ["(+)", "(-)", "(*)"]),
+-- Logic
+((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Bool -> Bool))), ["(&&)", "(||)"]),
+-- Ordered
+((Ref.SomeTypeRep (Ref.TypeRep @(Float -> Float -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
+-- Eq
+((Ref.SomeTypeRep (Ref.TypeRep @(IrisClass -> IrisClass -> Bool))), ["(==)","(/=)"]),
+-- Any Type
+((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Float -> Float -> Float))), ["if'"]),
+((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> IrisClass -> IrisClass -> IrisClass))), ["if'"])
 ],
 constants =
 Map.fromList
@@ -48,13 +53,13 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(IrisClass))), [(fmap show (enumUniform Setosa Versicolor :: RVar IrisClass))])
 ],
 targetType = (Ref.SomeTypeRep (Ref.TypeRep @(Float -> Float -> Float -> Float -> IrisClass))),
-maxDepth = 10,
+maxDepth = 9,
 weights =
 ExpressionWeights
-{ lambdaSpucker = 1,
-lambdaSchlucker = 1,
-symbol = 30,
-variable = 100,
+{ lambdaSpucker = 0,
+lambdaSchlucker = 10,
+symbol = 100,
+variable = 5,
 constant = 5
 }
 }
@@ -63,7 +68,7 @@ lEE :: LamdaExecutionEnv
 lEE =
 LamdaExecutionEnv
 { -- For now these need to define all available functions and types. Generic functions can be used.
-imports = ["LambdaDatasets.IrisDataset"],
+imports = ["LambdaDatasets.IrisDefinition"],
 training = True,
 trainingData =
 ( map fst (takeFraktion 0.8 irisTrainingData),
@@ -84,7 +89,7 @@ shuffledLEE = do
 itD <- smpl $ shuffle irisTrainingData
 return LamdaExecutionEnv
 { -- For now these need to define all available functions and types. Generic functions can be used.
-imports = ["LambdaDatasets.IrisDataset"],
+imports = ["LambdaDatasets.IrisDefinition"],
 training = True,
 trainingData =
 ( map fst (takeFraktion 0.8 itD),
@@ -37,7 +37,7 @@ lE =
 [ -- Math
 -- Logic
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Bool -> Bool))), ["(&&)", "(||)"]),
--- Ordered Enums
+-- Ordered
 ((Ref.SomeTypeRep (Ref.TypeRep @(NurseryClass -> NurseryClass -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Parents -> Parents -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(HasNurs -> HasNurs -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
@@ -47,7 +47,7 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(Finance -> Finance -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Social -> Social -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Health -> Health -> Bool))), ["(>)", "(==)", "(/=)", "(>=)"]),
--- Eq Enum
+-- Eq
 -- Any Type
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> Int -> Int -> Int))), ["if'"]),
 ((Ref.SomeTypeRep (Ref.TypeRep @(Bool -> NurseryClass -> NurseryClass -> NurseryClass))), ["if'","if'","if'","if'","if'","if'","if'","if'"]),
@@ -74,13 +74,13 @@ lE =
 ((Ref.SomeTypeRep (Ref.TypeRep @(Health))), [(fmap show (enumUniform NotRecommendHealth PriorityHealth ))])
 ],
 targetType = (Ref.SomeTypeRep (Ref.TypeRep @(Parents -> HasNurs -> Form -> Children -> Housing -> Finance -> Social -> Health -> NurseryClass))),
-maxDepth = 8,
+maxDepth = 9,
 weights =
 ExpressionWeights
-{ lambdaSpucker = 1,
-lambdaSchlucker = 2,
-symbol = 30,
-variable = 20,
+{ lambdaSpucker = 0,
+lambdaSchlucker = 10,
+symbol = 100,
+variable = 5,
 constant = 5
 }
 }
@@ -8,8 +8,8 @@ import Pipes
 import Pretty
 import Protolude hiding (for)
 import System.IO
--- import LambdaDatasets.IrisDataset
-import LambdaDatasets.NurseryDataset
+import LambdaDatasets.IrisDataset
+-- import LambdaDatasets.NurseryDataset
 -- import LambdaDatasets.GermanDataset
 import Debug.Trace as DB
 import qualified Data.Map.Strict as Map
@@ -35,7 +35,7 @@ options =
 ( long "population-size"
 <> short 'p'
 <> metavar "N"
-<> value 400
+<> value 100
 <> help "Population size"
 )

@@ -59,7 +59,7 @@ main =
 selectionType = Tournament 3,
 termination = (steps (iterations opts)),
 poulationSize = (populationSize opts),
-stepSize = 120,
+stepSize = 90,
 elitismRatio = 5/100
 }
 pop' <- runEffect (for (run cfg) logCsv)
run.sbatch (36 lines)
@@ -1,9 +1,31 @@
 #!/usr/bin/env bash
-#SBATCH --time=18:00:00
+# test this timing, it scales with result sizes
+#SBATCH --time=12:00:00
 #SBATCH --partition=cpu
-#SBATCH --array=0-30
-#SBATCH --output=./output/output_run_%j.txt
-#SBATCH --error=./output/error_run_%j.txt
-#SBATCH --nodelist=oc-compute02
-#SBATCH --mem=3G
-srun nix develop --command stack --no-nix --system-ghc --no-install-ghc run haga-lambda
+# 30 Runs * 9 Experiments * 3 Datasets
+#SBATCH --array=0-809
+# ensure output exists, is a folder and is writable in your working directory
+#SBATCH --output=./output/output_run_%a.txt
+#SBATCH --error=./output/error_run_%a.txt
+# exclude nodes with weaker CPUs
+#SBATCH --exclude=oc222
+# test memory usage, it scales **Exponentially** with max Depth. Implement some countermeasures if that's a problem, e.g. raise max depth over time.
+#SBATCH --mem=6G
+#SBATCH --nodes=1
+
+
+# list your branches
+problems=("iris" "nurse" "german")
+
+# 30 Runs * 9 Experiments
+current_problem=${problems[(${SLURM_ARRAY_TASK_ID}/270)]}
+# 30 Runs, 9 Experiments
+current_variant=$(((${SLURM_ARRAY_TASK_ID} / 30) % 9 + 1))
+current_branch="${current_problem}_${current_variant}"
+
+# ensure [full path to writable folder on node *] exists
+git clone -b $current_branch --single-branch "[your git repo]" [full path to writable folder on node 1]/$current_branch
+git clone -b $current_branch --single-branch "[your git repo]" [full path to writable folder on node 2]/$current_branch
+#... for every node
+
+srun bash -c "cd /data/$SLURMD_NODENAME/merljoha/$current_branch; nix develop --command stack --no-nix --system-ghc --no-install-ghc run haga-lambda"