accelerate==1.6.0
bitsandbytes==0.45.5
datasets==3.2.0
deepspeed==0.16.3
# If flash attention raises any errors during training, try installing flash-attn with the following command:
# pip install flash-attn==2.7.3 --no-build-isolation
flash-attn==2.7.3
matplotlib==3.10.0
numpy==2.2.4
pandas==2.2.3
peft==0.14.0
protobuf==6.31.1
seaborn==0.13.2
sentencepiece==0.2.0
torch==2.5.1
torchinfo==1.8.0
tqdm==4.67.1
transformers==4.47.0

# lm-evaluation-harness
evaluate==0.4.3
rouge-score==0.1.2
sacrebleu==2.5.1
sqlitedict==2.1.0

# Only for development.
yapf
