fix: checkpoint saving with deepspeed (#1321)
Browse files- requirements.txt +1 -1
- setup.py +1 -1
requirements.txt
CHANGED
@@ -5,7 +5,7 @@ transformers @ git+https://github.com/huggingface/transformers.git@ae49b218c3d71
|
|
5 |
tokenizers==0.15.0
|
6 |
bitsandbytes>=0.41.1
|
7 |
accelerate==0.26.1
|
8 |
-
deepspeed
|
9 |
pydantic>=2.5.3
|
10 |
addict
|
11 |
fire
|
|
|
5 |
tokenizers==0.15.0
|
6 |
bitsandbytes>=0.41.1
|
7 |
accelerate==0.26.1
|
8 |
+
deepspeed==0.13.1
|
9 |
pydantic>=2.5.3
|
10 |
addict
|
11 |
fire
|
setup.py
CHANGED
@@ -74,7 +74,7 @@ setup(
|
|
74 |
"fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
|
75 |
],
|
76 |
"deepspeed": [
|
77 |
-
"deepspeed",
|
78 |
"deepspeed-kernels",
|
79 |
],
|
80 |
"mamba-ssm": [
|
|
|
74 |
"fused-dense-lib @ git+https://github.com/Dao-AILab/flash-attention@v2.3.3#subdirectory=csrc/fused_dense_lib",
|
75 |
],
|
76 |
"deepspeed": [
|
77 |
+
"deepspeed==0.13.1",
|
78 |
"deepspeed-kernels",
|
79 |
],
|
80 |
"mamba-ssm": [
|