# NOTE: the lines below are page metadata captured when this file was
# extracted from its hosting site; kept as a comment so the module parses.
# Spaces: Sleeping / Sleeping
# File size: 1,410 Bytes — commit 78bb25c
# (original viewer line-number gutter: 1-41)
import configparser
import logging
def getconfig(configfile_path:str):
"""
configfile_path: file path of .cfg file
"""
config = configparser.ConfigParser()
try:
config.read_file(open(configfile_path))
return config
except:
logging.warning("config file not found")
# Declare all the necessary variables
def get_classifier_params(model_name, configfile_path: str = 'paramconfig.cfg'):
    """
    Read the classifier parameters for one model from the config file.

    model_name: section name in the config file holding this model's keys.
    configfile_path: path of the .cfg file (defaults to 'paramconfig.cfg',
        matching the original hard-coded behavior).

    Returns a dict with keys: model_name, split_by, split_length,
    split_overlap, remove_punc, split_respect_sentence_boundary,
    threshold, top_n.

    Raises FileNotFoundError when the config file cannot be read
    (previously this surfaced as an opaque AttributeError on None).
    """
    config = getconfig(configfile_path)
    if config is None:
        raise FileNotFoundError(f"cannot read config file: {configfile_path}")
    # getint/getfloat/getboolean are the idiomatic typed accessors;
    # getboolean accepts the "0"/"1" values the old bool(int(...)) parsed.
    return {
        'model_name': config.get(model_name, 'MODEL'),
        'split_by': config.get(model_name, 'SPLIT_BY'),
        'split_length': config.getint(model_name, 'SPLIT_LENGTH'),
        'split_overlap': config.getint(model_name, 'SPLIT_OVERLAP'),
        'remove_punc': config.getboolean(model_name, 'REMOVE_PUNC'),
        'split_respect_sentence_boundary': config.getboolean(
            model_name, 'RESPECT_SENTENCE_BOUNDARY'),
        'threshold': config.getfloat(model_name, 'THRESHOLD'),
        'top_n': config.getint(model_name, 'TOP_KEY'),
    }
# Model select
# Model select: maps each human-readable model label (shown to the user)
# to its Hugging Face repository id. The stray trailing "|" token after
# the closing brace (an extraction artifact) was a syntax error and is
# removed.
model_dict = {
    'Llama3.1-8B': 'meta-llama/Meta-Llama-3.1-8B-Instruct',
    'Llama3.1-70B': 'meta-llama/Meta-Llama-3.1-70B-Instruct',
    'Llama3.1-405B': 'meta-llama/Meta-Llama-3.1-405B-Instruct',
    'Zephyr 7B β': 'HuggingFaceH4/zephyr-7b-beta',
    'Mistral-7B': 'mistralai/Mistral-7B-Instruct-v0.2',
    'Mixtral-8x7B': 'mistralai/Mixtral-8x7B-Instruct-v0.1',
}