JOB DETAILS FOR JOB 161152

captured Mon Jun 16 20:26:01 SAST 2025

MaxRAM=9.7G
MaxVMSize=0G
AveCPUFreq=111K
AveDiskRead=180387051581
AveDiskWrite=25622654780

JobId=161152 JobName=FW-v11-split2
   UserId=grddan017(733323482 Daniel Gordon) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7246 Nice=0 Account=l40sfree QOS=l40sfree
   JobState=RUNNING Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=03:46:14 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-06-16T16:39:45 EligibleTime=2025-06-16T16:39:45
   AccrueTime=2025-06-16T16:39:47
   StartTime=2025-06-16T16:39:47 EndTime=2025-06-18T16:39:47 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-06-16T16:39:47 Scheduler=Main
   Partition=l40s AllocNode:Sid=srvrochpc001.uct.ac.za:562013
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu013
   BatchHost=srvrocgpu013
   NumNodes=1 NumCPUs=8 NumTasks=8 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   AllocTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:l40s:1
     Nodes=srvrocgpu013 CPU_IDs=0-3,24-27 Mem=80000 GRES=gpu:l40s:1(IDX:0)
   MinCPUsNode=1 MinMemoryCPU=10000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/grddan017/ForestWatch/l40-v11.sh
   WorkDir=/home/grddan017/ForestWatch
   StdErr=/home/grddan017/ForestWatch/slurm-161152.out
   StdIn=/dev/null
   StdOut=/home/grddan017/ForestWatch/slurm-161152.out
   Power=
   TresPerNode=gres/gpu:l40s:1
   

UCT HPC cluster
JOB DETAILS FOR JOB 161152

captured Mon Jun 16 20:26:38 SAST 2025

JobId=161152 JobName=FW-v11-split2
   UserId=grddan017(733323482 Daniel Gordon) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7246 Nice=0 Account=l40sfree QOS=l40sfree
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=03:46:22 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-06-16T16:39:45 EligibleTime=2025-06-16T16:39:45
   AccrueTime=2025-06-16T16:39:47
   StartTime=2025-06-16T16:39:47 EndTime=2025-06-16T20:26:09 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-06-16T16:39:47 Scheduler=Main
   Partition=l40s AllocNode:Sid=srvrochpc001.uct.ac.za:562013
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu013
   BatchHost=srvrocgpu013
   NumNodes=1 NumCPUs=8 NumTasks=8 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   AllocTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:l40s:1
     Nodes=srvrocgpu013 CPU_IDs=0-3,24-27 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=10000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/grddan017/ForestWatch/l40-v11.sh
   WorkDir=/home/grddan017/ForestWatch
   StdErr=/home/grddan017/ForestWatch/slurm-161152.out
   StdIn=/dev/null
   StdOut=/home/grddan017/ForestWatch/slurm-161152.out
   Power=
   TresPerNode=gres/gpu:l40s:1
   

UCT HPC cluster
JOB DETAILS FOR JOB 161152

captured Mon Jun 16 20:27:01 SAST 2025

JobId=161152 JobName=FW-v11-split2
   UserId=grddan017(733323482 Daniel Gordon) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7246 Nice=0 Account=l40sfree QOS=l40sfree
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=03:46:22 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-06-16T16:39:45 EligibleTime=2025-06-16T16:39:45
   AccrueTime=2025-06-16T16:39:47
   StartTime=2025-06-16T16:39:47 EndTime=2025-06-16T20:26:09 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-06-16T16:39:47 Scheduler=Main
   Partition=l40s AllocNode:Sid=srvrochpc001.uct.ac.za:562013
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu013
   BatchHost=srvrocgpu013
   NumNodes=1 NumCPUs=8 NumTasks=8 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   AllocTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:l40s:1
     Nodes=srvrocgpu013 CPU_IDs=0-3,24-27 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=10000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/grddan017/ForestWatch/l40-v11.sh
   WorkDir=/home/grddan017/ForestWatch
   StdErr=/home/grddan017/ForestWatch/slurm-161152.out
   StdIn=/dev/null
   StdOut=/home/grddan017/ForestWatch/slurm-161152.out
   Power=
   TresPerNode=gres/gpu:l40s:1
   

UCT HPC cluster
JOB DETAILS FOR JOB 161152

captured Mon Jun 16 20:27:43 SAST 2025

JobId=161152 JobName=FW-v11-split2
   UserId=grddan017(733323482 Daniel Gordon) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7246 Nice=0 Account=l40sfree QOS=l40sfree
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=03:46:22 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-06-16T16:39:45 EligibleTime=2025-06-16T16:39:45
   AccrueTime=2025-06-16T16:39:47
   StartTime=2025-06-16T16:39:47 EndTime=2025-06-16T20:26:09 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-06-16T16:39:47 Scheduler=Main
   Partition=l40s AllocNode:Sid=srvrochpc001.uct.ac.za:562013
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu013
   BatchHost=srvrocgpu013
   NumNodes=1 NumCPUs=8 NumTasks=8 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   AllocTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:l40s:1
     Nodes=srvrocgpu013 CPU_IDs=0-3,24-27 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=10000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/grddan017/ForestWatch/l40-v11.sh
   WorkDir=/home/grddan017/ForestWatch
   StdErr=/home/grddan017/ForestWatch/slurm-161152.out
   StdIn=/dev/null
   StdOut=/home/grddan017/ForestWatch/slurm-161152.out
   Power=
   TresPerNode=gres/gpu:l40s:1
   

UCT HPC cluster
JOB DETAILS FOR JOB 161152

captured Mon Jun 16 20:28:01 SAST 2025

JobId=161152 JobName=FW-v11-split2
   UserId=grddan017(733323482 Daniel Gordon) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=7246 Nice=0 Account=l40sfree QOS=l40sfree
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=1 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=03:46:22 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-06-16T16:39:45 EligibleTime=2025-06-16T16:39:45
   AccrueTime=2025-06-16T16:39:47
   StartTime=2025-06-16T16:39:47 EndTime=2025-06-16T20:26:09 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-06-16T16:39:47 Scheduler=Main
   Partition=l40s AllocNode:Sid=srvrochpc001.uct.ac.za:562013
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu013
   BatchHost=srvrocgpu013
   NumNodes=1 NumCPUs=8 NumTasks=8 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   AllocTRES=cpu=8,mem=80000M,node=1,billing=15,gres/gpu=1,gres/gpu:l40s=1
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:l40s:1
     Nodes=srvrocgpu013 CPU_IDs=0-3,24-27 Mem=80000 GRES=
   MinCPUsNode=1 MinMemoryCPU=10000M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/grddan017/ForestWatch/l40-v11.sh
   WorkDir=/home/grddan017/ForestWatch
   StdErr=/home/grddan017/ForestWatch/slurm-161152.out
   StdIn=/dev/null
   StdOut=/home/grddan017/ForestWatch/slurm-161152.out
   Power=
   TresPerNode=gres/gpu:l40s:1