Slurm batch scripts for running GPAW calculations
Script 1: ground-state calculation (gs.py)
#!/bin/bash
#SBATCH -J al201-gs
#SBATCH -o %J.out
#SBATCH -e %J.err
#SBATCH -N 1
#SBATCH -w node01
#SBATCH -n 16
#SBATCH -t 999:0:0
#SBATCH -p debug01

# Run a GPAW ground-state calculation (gs.py) under Slurm.
# Disable OpenMP threading: all parallelism comes from the MPI ranks.
export OMP_NUM_THREADS=1

echo "Time is $(date)"
echo "Working dir is $SLURM_SUBMIT_DIR"   # $SLURM_SUBMIT_DIR: job submission directory
cd "$SLURM_SUBMIT_DIR" || exit 1          # abort rather than run in the wrong directory

NPROCS=${SLURM_NTASKS}          # $SLURM_NTASKS: total number of tasks
N_NODE=${SLURM_JOB_NUM_NODES}   # $SLURM_JOB_NUM_NODES: number of nodes

echo "This job has allocated ${N_NODE} nodes with ${NPROCS} processors"
echo "Running on host $(hostname)"
echo "This jobs runs on the following processors:"
echo "**Start"

# Record job metadata for later inspection / post-processing tools.
echo "$SLURM_SUBMIT_DIR" > work.txt
echo "$SLURM_NODELIST" > nodefile.txt   # $SLURM_NODELIST: allocated node list
echo "$NPROCS" > cpucore.txt
echo "$N_NODE" > node.txt

export STARTTIME=$(date +%s.%3N)
export WALLTIME=$(squeue -h -o %l -j "$SLURM_JOB_ID")

# Load the user environment and the GPAW 22.8.0 runtime setup.
source ~/.bashrc
source ~/run_software_bashrc/before_run_gpaw22.8.0.sh

#srun gpaw -P python gs.py
mpiexec -np "${NPROCS}" gpaw python gs.py

echo "**Finished!"
Script 2: time-propagation TDDFT workflow (calculate, continue, postprocess, plot)
#!/bin/bash
#SBATCH -J al201-gs
#SBATCH -o %J.out
#SBATCH -e %J.err
#SBATCH -N 1
#SBATCH -w node01
#SBATCH -n 16
#SBATCH -t 999:0:0
#SBATCH -p debug01

# Run the full GPAW time-propagation TDDFT workflow:
#   calculate -> continue -> postprocess -> plot,
# appending each task's wall-clock duration to total_time.txt.
# Disable OpenMP threading: all parallelism comes from the MPI ranks.
export OMP_NUM_THREADS=1

echo "Time is $(date)"
echo "Working dir is $SLURM_SUBMIT_DIR"   # $SLURM_SUBMIT_DIR: job submission directory
cd "$SLURM_SUBMIT_DIR" || exit 1          # abort rather than run in the wrong directory

NPROCS=${SLURM_NTASKS}          # $SLURM_NTASKS: total number of tasks
N_NODE=${SLURM_JOB_NUM_NODES}   # $SLURM_JOB_NUM_NODES: number of nodes

echo "This job has allocated ${N_NODE} nodes with ${NPROCS} processors"
echo "Running on host $(hostname)"
echo "This jobs runs on the following processors:"
echo "**Start"

# Record job metadata for later inspection / post-processing tools.
echo "$SLURM_SUBMIT_DIR" > work.txt
echo "$SLURM_NODELIST" > nodefile.txt   # $SLURM_NODELIST: allocated node list
echo "$NPROCS" > cpucore.txt
echo "$N_NODE" > node.txt

export STARTTIME=$(date +%s.%3N)
export WALLTIME=$(squeue -h -o %l -j "$SLURM_JOB_ID")

# Load the user environment and the GPAW 22.8.0 runtime setup.
source ~/.bashrc
source ~/run_software_bashrc/before_run_gpaw22.8.0.sh

start_time=$(date +%s)

# Truncate the timing log once; every later write APPENDS with '>>'.
# (Bug fix: the original used '>' for each task, so each write clobbered
# the file and only the final "Total duration" line survived.)
: > total_time.txt

# Task 1: time-propagation calculation
task_start_time=$(date +%s)
mpiexec -np "${NPROCS}" gpaw python timepropagation_calculate.py
task_duration=$(( $(date +%s) - task_start_time ))
echo "Task 1 duration: $task_duration seconds" >> total_time.txt

# Task 2: continue the time propagation
task_start_time=$(date +%s)
mpiexec -np "${NPROCS}" gpaw python timepropagation_continue.py
task_duration=$(( $(date +%s) - task_start_time ))
echo "Task 2 duration: $task_duration seconds" >> total_time.txt

# Task 3: post-process the propagation results
task_start_time=$(date +%s)
mpiexec -np "${NPROCS}" gpaw python timepropagation_postprocess.py
task_duration=$(( $(date +%s) - task_start_time ))
echo "Task 3 duration: $task_duration seconds" >> total_time.txt

# Task 4: plot the results (serial; no MPI launcher needed)
task_start_time=$(date +%s)
python timepropagation_plot.py
task_duration=$(( $(date +%s) - task_start_time ))
echo "Task 4 duration: $task_duration seconds" >> total_time.txt

end_time=$(date +%s)
duration=$(( end_time - start_time ))
echo "Total duration: $duration seconds" >> total_time.txt
echo "**Finished!"
Script 3: ground-state calculation (FD mode, Mg8) on the hpc partition
#!/bin/bash
#SBATCH -J gs-fd-Mg8
#SBATCH -o %J.out
#SBATCH -e %J.err
#SBATCH -N 1
#SBATCH -w node01
#SBATCH -n 56
#SBATCH -t 30-12:00:00
#SBATCH -p hpc
#=======================
# GPAW ground-state (FD mode) run for Mg8.
# Timings are appended to "$SLURM_JOB_ID.out" alongside Slurm's own %J.out.
# Disable OpenMP threading: all parallelism comes from the MPI ranks.
export OMP_NUM_THREADS=1
#=======================
echo =======================
echo "Submit Time is [$(date)]."
echo "Working dir is [$SLURM_SUBMIT_DIR]."
cd "$SLURM_SUBMIT_DIR" || exit 1   # abort rather than run in the wrong directory
NPROCS=${SLURM_NTASKS}          # total number of tasks
N_NODE=${SLURM_JOB_NUM_NODES}   # number of nodes
echo "This job has allocated [${N_NODE}] nodes with [${NPROCS}] processors."
echo "Running on host [$(hostname)]."
echo "This jobs runs on the following processors:"
echo =======================
echo "**Start!!!"
echo "Job start Time is [$(date)]."
# echo "Submit Direction is "$SLURM_SUBMIT_DIR >> $SLURM_JOB_ID.out
# echo "Submit NodeList is "$SLURM_NODELIST >> $SLURM_JOB_ID.out
# echo "Submit CPU_Cores is "$NPROCS >> $SLURM_JOB_ID.out
# echo "Submit N_Node is "$N_NODE >> $SLURM_JOB_ID.out
echo "This job name is [$SLURM_JOB_NAME] and job id is [$SLURM_JOB_ID]." >> "$SLURM_JOB_ID.out"
#=======================
# export STARTTIME=`date +%s.%3N` # start time
# export WALLTIME=12:00:00 # wall time
export STARTTIME=$(date +%s.%3N)
export WALLTIME=$(squeue -h -o %l -j "$SLURM_JOB_ID")
# Load the user environment and the GPAW 22.8.0 runtime setup.
source ~/.bashrc
source /data1/soft_bashrc/before_run_gpaw22.8.0.sh

start_time=$(date +%s)
# Task 1: ground-state calculation
task_start_time=$(date +%s)
mpiexec -np "${NPROCS}" gpaw python gs.py
# mpiexec -np ${NPROCS} gpaw python gsc.py
task_duration=$(( $(date +%s) - task_start_time ))
echo "Task 1 duration: $task_duration seconds." >> "$SLURM_JOB_ID.out"
end_time=$(date +%s)
duration=$(( end_time - start_time ))
echo "Total duration: $duration seconds." >> "$SLURM_JOB_ID.out"

echo "Job finish Time is [$(date)]."
echo "**Finished!!!"
echo =======================
echo " "