submit.py
import os
import sys
import argparse
import numpy as np
from math import ceil
from utils.submitter import Submitter


def make_dir(dir):
  if not os.path.exists(dir):
    os.makedirs(dir, exist_ok=True)

def main(argv):
  # python submit.py --job_type S
  # python submit.py --job_type M
  parser = argparse.ArgumentParser(description="Submit jobs")
  parser.add_argument('--job_type', type=str, default='M', help='Run single (S) or multiple (M) jobs in one experiment: S, M')
  args = parser.parse_args()
  sbatch_cfg = {
    # Account name
    # 'account': 'def-ashique',
    'account': 'rrg-ashique',
    # Job name
    # 'job-name': 'mujoco_sac', # 1/2GPU, 1CPU, 8G, 75min
    'job-name': 'mujoco_sac', # 1GPU, 1CPU, 8G, 50min
    # Job time
    'time': '0-01:15:00',
    # Email notification
    'mail-user': '[email protected]'
  }
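  # For reference (an assumption, not something this script emits verbatim):
  # the keys above correspond to standard sbatch options, e.g.
  #   #SBATCH --account=rrg-ashique
  #   #SBATCH --job-name=mujoco_sac
  #   #SBATCH --time=0-01:15:00
  #   #SBATCH --mail-user=<the address configured above>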
  general_cfg = {
    # User name
    'user': 'qlan3',
    # Check time interval in minutes
    'check-time-interval': 5,
    # Clusters info: name & capacity
    'cluster_capacity': 996,
    # Job indexes list
    # 'job-list': np.array([1,2])
    'job-list': np.array(range(1, 60+1))
  }
  make_dir(f"output/{sbatch_cfg['job-name']}")

  if args.job_type == 'M':
    # The number of total jobs for one task
    jobs_per_task = 2
    # Max number of parallel jobs in one task
    max_parallel_jobs = 2
    mem_per_job = 8  # in GB
    cpu_per_job = 1  # A larger cpu_per_job increases speed
    mem_per_cpu = int(ceil(max_parallel_jobs*mem_per_job/cpu_per_job))
    # Write the parallel job count to procfile for Parallel
    with open('procfile', 'w') as f:
      f.write(str(max_parallel_jobs))
    sbatch_cfg['gres'] = 'gpu:1'  # GPU type
    sbatch_cfg['cpus-per-task'] = cpu_per_job*max_parallel_jobs
    sbatch_cfg['mem-per-cpu'] = f'{mem_per_cpu}G'  # Memory
    # Sbatch script path
    general_cfg['script-path'] = './sbatch_m.sh'
    # Max number of jobs for Parallel
    general_cfg['jobs-per-task'] = jobs_per_task
    submitter = Submitter(general_cfg, sbatch_cfg)
    submitter.multiple_submit()
  elif args.job_type == 'S':
    mem_per_job = 8  # in GB
    cpu_per_job = 1  # A larger cpu_per_job increases speed
    mem_per_cpu = int(ceil(mem_per_job/cpu_per_job))
    sbatch_cfg['gres'] = 'gpu:1'  # GPU type
    sbatch_cfg['cpus-per-task'] = cpu_per_job
    sbatch_cfg['mem-per-cpu'] = f'{mem_per_cpu}G'  # Memory
    # Sbatch script path
    general_cfg['script-path'] = './sbatch_s.sh'
    submitter = Submitter(general_cfg, sbatch_cfg)
    submitter.single_submit()
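  # Worked example with the defaults above (8 GB and 1 CPU per job in both
  # branches): job_type 'M' requests cpus-per-task=2 and mem-per-cpu=16G
  # (ceil(2*8/1)), i.e. 32 GB per task for 2 concurrent jobs, while
  # job_type 'S' requests 1 CPU and 8G; both request one GPU via gres='gpu:1'.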

if __name__ == '__main__':
  main(sys.argv)
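The Submitter class imported from utils.submitter is defined elsewhere in the repository and is not shown on this page. The minimal sketch below is only an inference from how main() uses it: a constructor taking general_cfg and sbatch_cfg, plus single_submit() and multiple_submit(). Everything else, including the behavior described in the comments, is an assumption rather than the actual implementation.

class Submitter:
  def __init__(self, general_cfg, sbatch_cfg):
    self.general_cfg = general_cfg  # user, job-list, script-path, jobs-per-task, ...
    self.sbatch_cfg = sbatch_cfg    # options forwarded to sbatch

  def single_submit(self):
    # Presumably submits general_cfg['script-path'] once per index in
    # general_cfg['job-list'], passing sbatch_cfg entries as sbatch options.
    raise NotImplementedError

  def multiple_submit(self):
    # Presumably packs general_cfg['jobs-per-task'] job indexes into each
    # sbatch task, running up to the count written to procfile concurrently.
    raise NotImplementedError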