File size: 1,541 Bytes
0fad117
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
48
49
50
51
52
53
54
55
56
57
58
59
# Move to the directory containing this script, then load the GPU helpers
# (gpu_utility.sh provides update_device_idx, which assigns $device).
cd "$(cd "$(dirname "$0")" && pwd)" || exit 1
source gpu_utility.sh

##### setup
# NOTE(review): this shebang is inert — it is not line 1 of the file, so it
# is treated as a plain comment. The script is bash (arrays, ${arr[@]}).
#!/bin/bash
file_name=$(basename "$0")
current_path=$(pwd)
# Work from the project root; abort rather than run from the wrong directory.
cd /data/yixin/workspace/unl-graph-usenix || exit 1
source activate /data/yixin/anaconda/unlg

# --- experiment grid -------------------------------------------------------
datasets=( "IMDB-BINARY" "MUTAG" "ENZYMES" "IMDB-MULTI" )
models=( "gcn" "gin" "sage" )
entity="mib-nlp"
exp_name="adv-run-v3"
batch_size=8
methods=( "clean" "rand" "feat" "grad" "inject" "adv" )
wd=1e-5
adv_train_budgets=( 0.07 0.09 0.11 )
gen_exp_name="main-results-v2"
lr=0.01
es_patience=40
seed_default=0
optimizer="adam"
budget=0.05
total_epoch=300
max_steps=5000
seeds=( "402" )

# Create the log directories if they do not exist (-p makes parents too).
mkdir -p "$current_path/logs/"
mkdir -p "$current_path/logs/$exp_name"
#####



##### loop
# For every (adv_train_budget, dataset, model, method) combination: pick a
# GPU via update_device_idx (sets $device), then launch that combination's
# seed sweep as ONE background job — combinations run concurrently while the
# seeds within a combination run serially, matching the original structure.
#
# BUG FIXED(review): the original built the command through two nested
# `eval`s with `\$seed` escaped only once, so $seed was expanded when the
# inner string was ASSIGNED (unset on the first combination → empty --seed
# and a mangled log name) rather than inside the seed loop. Running the loop
# directly removes both the bug and the eval quoting hazard.
for adv_train_budget in "${adv_train_budgets[@]}"; do
  for dataset in "${datasets[@]}"; do
    for model in "${models[@]}"; do
      for method in "${methods[@]}"; do

        update_device_idx

        (
          for seed in "${seeds[@]}"; do
            # $RANDOM$RANDOM keeps log filenames unique across re-runs.
            nohup python eval.py \
              --dataset "$dataset" \
              --model "$model" \
              --method "$method" \
              --lr "$lr" \
              --exp_name "$exp_name" \
              --entity "$entity" \
              --batch_size "$batch_size" \
              --seed "$seed" \
              --early_stop \
              --num_epochs "$total_epoch" \
              --wd "$wd" \
              --device "$device" \
              --es_patience "$es_patience" \
              --optimizer "$optimizer" \
              --max_steps "$max_steps" \
              --adv_train \
              --adv_train_budget "$adv_train_budget" \
              --gen_exp_name "$gen_exp_name" \
              > "$current_path/logs/$exp_name/$dataset.$model.$method-$seed-$RANDOM$RANDOM.log" 2>&1
          done
        ) &

      done
    done
  done
done