Spaces:
Running
on
CPU Upgrade
Running
on
CPU Upgrade
bump-up-transformers (#859)
Browse files
- Use transformers 4.43.1 (6fd60822fd16e90541708daac7835546b8ba06fa)
- Use force_download to override the existing cache if any (97a83d4711d87156cea09c9ced09da6b72b391e4)
- pyproject.toml +1 -1
- requirements.txt +1 -1
- src/submission/check_validity.py +1 -2
pyproject.toml
CHANGED
@@ -40,7 +40,7 @@ plotly = "5.14.1"
|
|
40 |
python-dateutil = "2.8.2"
|
41 |
sentencepiece = "^0.2.0"
|
42 |
tqdm = "4.65.0"
|
43 |
-
transformers = "4.
|
44 |
tokenizers = ">=0.15.0"
|
45 |
gradio-space-ci = {git = "https://huggingface.co/spaces/Wauplin/gradio-space-ci", rev = "0.2.3"}
|
46 |
isort = "^5.13.2"
|
|
|
40 |
python-dateutil = "2.8.2"
|
41 |
sentencepiece = "^0.2.0"
|
42 |
tqdm = "4.65.0"
|
43 |
+
transformers = "4.43.1"
|
44 |
tokenizers = ">=0.15.0"
|
45 |
gradio-space-ci = {git = "https://huggingface.co/spaces/Wauplin/gradio-space-ci", rev = "0.2.3"}
|
46 |
isort = "^5.13.2"
|
requirements.txt
CHANGED
@@ -10,7 +10,7 @@ plotly==5.14.1
|
|
10 |
python-dateutil==2.8.2
|
11 |
sentencepiece
|
12 |
tqdm==4.65.0
|
13 |
-
transformers==4.
|
14 |
tokenizers>=0.15.0
|
15 |
gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/gradio-space-ci@0.2.3 # CI !!!
|
16 |
isort
|
|
|
10 |
python-dateutil==2.8.2
|
11 |
sentencepiece
|
12 |
tqdm==4.65.0
|
13 |
+
transformers==4.43.1
|
14 |
tokenizers>=0.15.0
|
15 |
gradio-space-ci @ git+https://huggingface.co/spaces/Wauplin/gradio-space-ci@0.2.3 # CI !!!
|
16 |
isort
|
src/submission/check_validity.py
CHANGED
@@ -46,8 +46,7 @@ def is_model_on_hub(
|
|
46 |
) -> tuple[bool, str, AutoConfig]:
|
47 |
try:
|
48 |
config = AutoConfig.from_pretrained(
|
49 |
-
model_name, revision=revision, trust_remote_code=trust_remote_code, token=token
|
50 |
-
) # , force_download=True)
|
51 |
if test_tokenizer:
|
52 |
try:
|
53 |
tk = AutoTokenizer.from_pretrained(
|
|
|
46 |
) -> tuple[bool, str, AutoConfig]:
|
47 |
try:
|
48 |
config = AutoConfig.from_pretrained(
|
49 |
+
model_name, revision=revision, trust_remote_code=trust_remote_code, token=token, force_download=True)
|
|
|
50 |
if test_tokenizer:
|
51 |
try:
|
52 |
tk = AutoTokenizer.from_pretrained(
|