transformers==4.45.2
datasets==2.19.2
transformer-tricks>=0.2.3
accelerate>=0.9.0
safetensors>=0.4.5
huggingface-hub>=0.25.1
torch>=2.4.1
tqdm>=4.64.1
jupytext>=1.16.4
autopep8>=2.3.1
twine>=5.1.1
build>=1.2.2

# pip list  # see all installed versions
#
# Phi-3 needs flash-attn, but this requires CUDA
# flash-attn==2.5.8