JOB DETAILS FOR JOB 151476

captured Fri May 30 07:00:33 SAST 2025

MaxRAM=14.6G
MaxVMSize=0G
AveCPUFreq=0
AveDiskRead=79688852
AveDiskWrite=79964562
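
The usage summary above sits well inside the request recorded further down: a peak RAM of 14.6G against the 80000M allocation (10 CPUs x MinMemoryCPU=8000M), with the VM-size and CPU-frequency fields unsampled (0) and modest average disk read/write counters. These are the kind of per-step statistics that Slurm's own accounting tools expose. The lines below are only a minimal sketch of how they might be queried, assuming the standard sstat/sacct field names (MaxRSS rather than the "MaxRAM" label used by whatever capture script produced this log):

    JOBID=151476

    # Live statistics while the job is still running (per-step figures).
    sstat -j "${JOBID}" --format=JobID,MaxRSS,MaxVMSize,AveCPUFreq,AveDiskRead,AveDiskWrite

    # The same figures from the accounting database once the job has finished.
    sacct -j "${JOBID}" --format=JobID,State,Elapsed,MaxRSS,MaxVMSize,AveDiskRead,AveDiskWrite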

JobId=151476 JobName=fluxPar2022
   UserId=mnlgre001(291293653 G Maniel) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7498 Nice=0 Account=biosci QOS=normal
   JobState=RUNNING Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=21:29:44 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-05-29T09:30:49 EligibleTime=2025-05-29T09:30:49
   AccrueTime=2025-05-29T09:30:49
   StartTime=2025-05-29T09:30:49 EndTime=2025-05-31T09:30:49 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-05-29T09:30:49 Scheduler=Main
   Partition=ada AllocNode:Sid=srvrochpc001.uct.ac.za:597883
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrochpc105
   BatchHost=srvrochpc105
   NumNodes=1 NumCPUs=10 NumTasks=10 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=10,mem=80000M,node=1,billing=17
   AllocTRES=cpu=10,mem=80000M,node=1,billing=17
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=(null)
     Nodes=srvrochpc105 CPU_IDs=20-23,28-33 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=8000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=../09_autolimr_pipeline_par_foreach.sh
   WorkDir=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out
   StdErr=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out/slurm-151476.out
   StdIn=/dev/null
   StdOut=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out/slurm-151476.out
   Power=
   

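Each "JOB DETAILS" block in this log is a full scontrol show job record, and the capture timestamps are roughly 30 seconds apart, which suggests a simple polling loop. The actual capture script is not part of this log; the following is only a hypothetical sketch of such a monitor:

    #!/usr/bin/env bash
    # Hypothetical monitor (assumed; not the script that produced this log):
    # append a timestamped scontrol snapshot of the job roughly every 30 seconds.
    JOBID=151476
    LOG="job_${JOBID}_details.log"

    while true; do
        {
            echo "JOB DETAILS FOR JOB ${JOBID}"
            echo
            echo "captured $(date)"
            echo
            scontrol show job "${JOBID}" || break   # stop once the job record is no longer available
            echo
        } >> "${LOG}"
        sleep 30
    done
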
UCT HPC cluster
JOB DETAILS FOR JOB 151476

captured Fri May 30 07:01:01 SAST 2025

JobId=151476 JobName=fluxPar2022
   UserId=mnlgre001(291293653 G Maniel) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7498 Nice=0 Account=biosci QOS=normal
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=21:29:47 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-05-29T09:30:49 EligibleTime=2025-05-29T09:30:49
   AccrueTime=2025-05-29T09:30:49
   StartTime=2025-05-29T09:30:49 EndTime=2025-05-30T07:00:36 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-05-29T09:30:49 Scheduler=Main
   Partition=ada AllocNode:Sid=srvrochpc001.uct.ac.za:597883
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrochpc105
   BatchHost=srvrochpc105
   NumNodes=1 NumCPUs=10 NumTasks=10 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=10,mem=80000M,node=1,billing=17
   AllocTRES=cpu=10,mem=80000M,node=1,billing=17
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=(null)
     Nodes=srvrochpc105 CPU_IDs=20-23,28-33 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=8000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=../09_autolimr_pipeline_par_foreach.sh
   WorkDir=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out
   StdErr=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out/slurm-151476.out
   StdIn=/dev/null
   StdOut=/scratch/mnlgre001/networks_faseyic/slurm/slurm_out/slurm-151476.out
   Power=
   

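The allocation shown in these records (JobName=fluxPar2022, Account=biosci, Partition=ada, 10 tasks with 1 CPU each, MinMemoryCPU=8000M, a 2-day time limit, and output under the default slurm-<jobid>.out name in the submission directory) corresponds to directives along the following lines inside the batch script. This is a hedged reconstruction for illustration only; the actual 09_autolimr_pipeline_par_foreach.sh is not shown in this log.

    #!/usr/bin/env bash
    # Illustrative #SBATCH header only; the real batch script is not part of this log.
    #SBATCH --job-name=fluxPar2022
    #SBATCH --account=biosci
    #SBATCH --partition=ada
    #SBATCH --ntasks=10
    #SBATCH --cpus-per-task=1
    #SBATCH --mem-per-cpu=8000M
    #SBATCH --time=2-00:00:00
    # With no --output/--error directive, StdOut and StdErr fall back to Slurm's
    # default slurm-%j.out in the working directory, matching the
    # slurm-151476.out path recorded above.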