#!/bin/bash
#SBATCH -N 1
#SBATCH -n 1
#SBATCH -p a6000
#SBATCH -J run_sps
#SBATCH -o run_sps.o%j
#SBATCH -e run_sps.e%j

# Benchmark sweep: run benchmark.py over a grid of sampling temperatures
# and speculative-decoding gamma values on the xsum dataset.
# The commented-out invocations are alternate modes / datasets / draft
# models kept for easy toggling between runs.

# Fail fast: abort the job on the first failed command and propagate
# pipeline failures. '-u' is deliberately omitted because 'module' and
# the conda shell hook commonly reference unset variables.
set -eo pipefail

### Enable any environment variables here:
# export MKL_NUM_THREADS=32
# export OMP_NUM_THREADS=32
module load cuda

# 'conda activate' is a shell function that is NOT defined in a
# non-interactive SLURM batch shell; source conda's shell hook first,
# otherwise activation fails with "conda: command not found" or
# "CommandNotFoundError: Your shell has not been properly configured".
source "$(conda info --base)/etc/profile.d/conda.sh"
conda activate llm

# Record GPU state in the job log.
nvidia-smi

for temperature in 0.0 1.0; do
    for gamma in 1 2 3 4 5 6 7 8; do
        echo "Running with gamma: $gamma and temperature: $temperature"
        # python3 benchmark.py --mode ars -g "$gamma" -t "$temperature" --dataset openai_humaneval
        # python3 benchmark.py --mode sps -g "$gamma" -t "$temperature" --dataset openai_humaneval
        # python3 benchmark.py --mode dsps -g "$gamma" -t "$temperature" --dataset openai_humaneval

        python3 benchmark.py --mode ars -g "$gamma" -t "$temperature" --dataset xsum
        # python3 benchmark.py --mode sps -g "$gamma" -t "$temperature" --dataset xsum
        # python3 benchmark.py --mode dsps -g "$gamma" -t "$temperature" --dataset xsum


        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode ars -g "$gamma" -t "$temperature" --dataset openai_humaneval
        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode sps -g "$gamma" -t "$temperature" --dataset openai_humaneval
        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode dsps -g "$gamma" -t "$temperature" --dataset openai_humaneval

        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode ars -g "$gamma" -t "$temperature" --dataset xsum
        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode sps -g "$gamma" -t "$temperature" --dataset xsum
        # python3 benchmark.py --draft-model TinyLlama/TinyLlama-1.1B-step-50K-105b --mode dsps -g "$gamma" -t "$temperature" --dataset xsum
    done
done