JOB DETAILS FOR JOB 412243

captured Tue Dec 30 05:47:32 SAST 2025

MaxRAM=1.3G
MaxVMSize=0G
AveCPUFreq=800K
AveDiskRead=1538699268
AveDiskWrite=115963
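
The usage figures above resemble the per-step statistics Slurm's sstat reports for a running job (Slurm's own name for the resident-set-size field is MaxRSS rather than MaxRAM, and AveDiskRead/AveDiskWrite are byte counts). The capture script itself is not shown, so the following is only a sketch of how such a snapshot is typically taken, not the actual command used:

    # Hypothetical reconstruction of the capture; the real script is not shown.
    # Live per-step usage statistics for the running job:
    sstat --jobs=412243 --allsteps \
          --format=JobID,MaxRSS,MaxVMSize,AveCPUFreq,AveDiskRead,AveDiskWrite
    # The detailed record that follows is standard scontrol output:
    scontrol show job 412243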

lmbanr001(733329384 Anri Lombard) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=8158 Nice=0 Account=nlpgroup QOS=nlpgroup
   JobState=RUNNING Reason=None Dependency=(null)
   Requeue=0 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=00:16:08 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-12-30T05:29:45 EligibleTime=2025-12-30T05:29:45
   AccrueTime=2025-12-30T05:29:45
   StartTime=2025-12-30T05:31:24 EndTime=2026-01-01T05:31:24 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-12-30T05:31:24 Scheduler=Main
   Partition=a100 AllocNode:Sid=srvrochpc001:1538092
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu010
   BatchHost=srvrocgpu010
   NumNodes=1 NumCPUs=8 NumTasks=1 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=1,mem=9142M,node=1,billing=3,gres/gpu=2
   AllocTRES=cpu=8,mem=73136M,node=1,billing=17,gres/gpu=2,gres/gpu:ampere=2
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:ampere:2
     Nodes=srvrocgpu010 CPU_IDs=0-3,28-31 Mem=73136 GRES=gpu:ampere:2(IDX:0-1)
   MinCPUsNode=1 MinMemoryCPU=9142M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/lmbanr001/masters/sallm/scripts/launch_evaluation.sh
   WorkDir=/home/lmbanr001/masters/sallm
   StdErr=/home/lmbanr001/masters/sallm/slurm-412243.out
   StdIn=/dev/null
   StdOut=/home/lmbanr001/masters/sallm/slurm-412243.out
   Power=
   CpusPerTres=gres/gpu:4
   TresPerNode=gres/gpu:2
   MailUser=LMBANR001@myuct.ac.za MailType=END,FAIL
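
The allocation above follows from the per-GPU request: CpusPerTres=gres/gpu:4 with two GPUs yields the 8 allocated CPUs, and MinMemoryCPU=9142M times 8 CPUs gives the 73136M in AllocTRES. The batch script launch_evaluation.sh is not shown, so the sbatch header below is only an illustrative reconstruction consistent with this record, not the actual script:

    #!/bin/bash
    # Illustrative sbatch directives consistent with the job record above;
    # the real launch_evaluation.sh is not shown, so treat this as a guess.
    #SBATCH --account=nlpgroup
    #SBATCH --qos=nlpgroup
    #SBATCH --partition=a100
    #SBATCH --nodes=1
    #SBATCH --ntasks=1
    #SBATCH --gres=gpu:ampere:2           # JOB_GRES=gpu:ampere:2
    #SBATCH --cpus-per-gpu=4              # CpusPerTres=gres/gpu:4 -> 8 CPUs
    #SBATCH --mem-per-cpu=9142M           # MinMemoryCPU=9142M -> 73136M total
    #SBATCH --time=2-00:00:00             # TimeLimit=2-00:00:00
    #SBATCH --mail-user=LMBANR001@myuct.ac.za
    #SBATCH --mail-type=END,FAIL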
   

UCT HPC cluster
JOB DETAILS FOR JOB 412243

captured Tue Dec 30 05:48:01 SAST 2025

lmbanr001(733329384 Anri Lombard) GroupId=eresearch_hpc_users(1221947160) MCS_label=N/A
   Priority=8158 Nice=0 Account=nlpgroup QOS=nlpgroup
   JobState=COMPLETED Reason=None Dependency=(null)
   Requeue=0 Restarts=0 BatchFlag=1 Reboot=0 ExitCode=0:0
   DerivedExitCode=0:0
   RunTime=00:16:34 TimeLimit=2-00:00:00 TimeMin=N/A
   SubmitTime=2025-12-30T05:29:45 EligibleTime=2025-12-30T05:29:45
   AccrueTime=2025-12-30T05:29:45
   StartTime=2025-12-30T05:31:24 EndTime=2025-12-30T05:47:58 Deadline=N/A
   SuspendTime=None SecsPreSuspend=0 LastSchedEval=2025-12-30T05:31:24 Scheduler=Main
   Partition=a100 AllocNode:Sid=srvrochpc001:1538092
   ReqNodeList=(null) ExcNodeList=(null)
   NodeList=srvrocgpu010
   BatchHost=srvrocgpu010
   NumNodes=1 NumCPUs=8 NumTasks=1 CPUs/Task=1 ReqB:S:C:T=0:0:*:*
   ReqTRES=cpu=1,mem=9142M,node=1,billing=3,gres/gpu=2
   AllocTRES=cpu=8,mem=73136M,node=1,billing=17,gres/gpu=2,gres/gpu:ampere=2
   Socks/Node=* NtasksPerN:B:S:C=0:0:*:* CoreSpec=*
   JOB_GRES=gpu:ampere:2
     Nodes=srvrocgpu010 CPU_IDs=0-3,28-31 Mem=73136 GRES=
   MinCPUsNode=1 MinMemoryCPU=9142M MinTmpDiskNode=0
   Features=(null) DelayBoot=00:00:00
   OverSubscribe=OK Contiguous=0 Licenses=(null) Network=(null)
   Command=/home/lmbanr001/masters/sallm/scripts/launch_evaluation.sh
   WorkDir=/home/lmbanr001/masters/sallm
   StdErr=/home/lmbanr001/masters/sallm/slurm-412243.out
   StdIn=/dev/null
   StdOut=/home/lmbanr001/masters/sallm/slurm-412243.out
   Power=
   CpusPerTres=gres/gpu:4
   TresPerNode=gres/gpu:2
   MailUser=LMBANR001@myuct.ac.za MailType=END,FAIL
   

UCT HPC cluster

Subsequent captures at 05:48:32, 05:49:01, 05:49:32, and 05:50:01 SAST 2025 returned the same COMPLETED job record as the capture at 05:48:01 above.
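
Once a completed job ages out of scontrol's in-memory job list (after MinJobAge seconds), its record is only available from the accounting database via sacct; a query along the following lines (the field selection is illustrative) retrieves the finished job:

    # Accounting record for the completed job; the field list is illustrative.
    sacct --jobs=412243 \
          --format=JobID,JobName,Partition,State,ExitCode,Elapsed,AllocTRES,MaxRSS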