Yao Fu
committed on
Commit
•
b3946ef
1
Parent(s):
aa83719
fix bugs
Browse files
- backend-cli.py +19 -19
- src/backend/moe_infinity.py +1 -1
backend-cli.py
CHANGED
@@ -406,7 +406,7 @@ if __name__ == "__main__":
|
|
406 |
if local_debug:
|
407 |
debug_model_names = ["mistralai/Mixtral-8x7B-Instruct-v0.1"]
|
408 |
# debug_model_names = ["TheBloke/Mixtral-8x7B-v0.1-GPTQ"]
|
409 |
-
# debug_task_name = '
|
410 |
debug_task_name = "mmlu"
|
411 |
task_lst = TASKS_HARNESS.copy()
|
412 |
for task in task_lst:
|
@@ -418,25 +418,25 @@ if __name__ == "__main__":
|
|
418 |
model=debug_model_name, private=False, status="", json_filepath="", precision="float16"
|
419 |
)
|
420 |
results = process_evaluation(task, eval_request)
|
|
|
|
|
|
|
421 |
|
422 |
-
|
423 |
-
|
424 |
-
|
425 |
-
|
426 |
-
res = process_pending_requests()
|
427 |
-
print(f"waiting for 60 seconds")
|
428 |
-
time.sleep(60)
|
429 |
|
430 |
-
|
431 |
-
|
432 |
-
|
433 |
-
|
434 |
-
|
435 |
|
436 |
-
|
437 |
|
438 |
-
|
439 |
-
|
440 |
-
|
441 |
-
|
442 |
-
|
|
|
406 |
if local_debug:
|
407 |
debug_model_names = ["mistralai/Mixtral-8x7B-Instruct-v0.1"]
|
408 |
# debug_model_names = ["TheBloke/Mixtral-8x7B-v0.1-GPTQ"]
|
409 |
+
# debug_task_name = 'selfcheck'
|
410 |
debug_task_name = "mmlu"
|
411 |
task_lst = TASKS_HARNESS.copy()
|
412 |
for task in task_lst:
|
|
|
418 |
model=debug_model_name, private=False, status="", json_filepath="", precision="float16"
|
419 |
)
|
420 |
results = process_evaluation(task, eval_request)
|
421 |
+
else:
|
422 |
+
while True:
|
423 |
+
res = False
|
424 |
|
425 |
+
# if random.randint(0, 10) == 0:
|
426 |
+
res = process_pending_requests()
|
427 |
+
print(f"waiting for 60 seconds")
|
428 |
+
time.sleep(60)
|
|
|
|
|
|
|
429 |
|
430 |
+
# if res is False:
|
431 |
+
# if random.randint(0, 5) == 0:
|
432 |
+
# res = maybe_refresh_results(100)
|
433 |
+
# else:
|
434 |
+
# res = process_finished_requests(100)
|
435 |
|
436 |
+
# time.sleep(60)
|
437 |
|
438 |
+
# if res is False:
|
439 |
+
# if random.randint(0, 5) == 0:
|
440 |
+
# res = maybe_refresh_results(0)
|
441 |
+
# else:
|
442 |
+
# res = process_finished_requests(0)
|
src/backend/moe_infinity.py
CHANGED
@@ -2,7 +2,7 @@ import torch
|
|
2 |
import os
|
3 |
from transformers import AutoTokenizer
|
4 |
from transformers import AutoModelForCausalLM
|
5 |
-
from moe_infinity import MoE
|
6 |
from typing import List, Tuple, Optional, Union
|
7 |
|
8 |
from lm_eval.models.huggingface import HFLM
|
|
|
2 |
import os
|
3 |
from transformers import AutoTokenizer
|
4 |
from transformers import AutoModelForCausalLM
|
5 |
+
# from moe_infinity import MoE
|
6 |
from typing import List, Tuple, Optional, Union
|
7 |
|
8 |
from lm_eval.models.huggingface import HFLM
|