File size: 1,895 Bytes
ee31436
56bf4e8
ee31436
 
 
56bf4e8
9da8cd9
baaed3a
56bf4e8
 
 
 
 
 
 
b8fce5e
ee31436
 
8ac95ce
ee31436
 
 
56bf4e8
ee31436
 
 
 
 
 
 
56bf4e8
 
 
ee31436
56bf4e8
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
36
37
38
39
40
41
42
43
44
45
46
47
"""Configuration constants for the Humanlike leaderboard.

Defines the Hugging Face Hub repositories, authentication, local cache
directories, dataset file locations, device selection, and the prompts
used when querying models.
"""
import os

import torch
from huggingface_hub import HfApi

# Hub auth token read from the environment.
# NOTE(review): an earlier revision read "HF_TOKEN" instead — confirm
# which variable the deployment actually sets.
TOKEN = os.getenv("H4_TOKEN")

# Hub organization that owns the leaderboard repos. Change to your org —
# don't forget to create a results and a requests dataset with the
# correct format!
OWNER = "Simondon"
# ----------------------------------

REPO_ID = f"{OWNER}/HumanLikeness"   # the leaderboard repo itself
QUEUE_REPO = f"{OWNER}/requests"     # dataset holding pending eval requests
RESULTS_REPO = f"{OWNER}/results"    # dataset holding finished eval results

# Root directory for local caches; falls back to the current directory
# when HF_HOME is not set.
CACHE_PATH = os.getenv("HF_HOME", ".")

# Local caches (queue/results, plus backend copies).
EVAL_REQUESTS_PATH = os.path.join(CACHE_PATH, "eval-queue")
EVAL_RESULTS_PATH = os.path.join(CACHE_PATH, "eval-results")
EVAL_REQUESTS_PATH_BACKEND = os.path.join(CACHE_PATH, "eval-queue-bk")
EVAL_RESULTS_PATH_BACKEND = os.path.join(CACHE_PATH, "eval-results-bk")

# Run on GPU when one is available, otherwise fall back to CPU.
DEVICE = torch.device('cuda' if torch.cuda.is_available() else 'cpu')
API = HfApi(token=TOKEN)

DATASET_PATH = "./src/datasets/Material_Llama2_0603.xlsx"  # experiment data
PROMPT_PATH = "./src/datasets/prompt.xlsx"  # prompt for each experiment
HEM_PATH = 'vectara/hallucination_evaluation_model'  # hallucination evaluation model id
HUMAN_DATA = "./src/datasets/human_data.csv"  # human experiment data
ITEM_4_DATA = "./src/datasets/associataion_dataset.csv"  # association database (filename typo is in the actual file name)
ITEM_5_DATA = "./src/datasets/Items_5.csv"  # experiment 5 verb words

# System prompt framing the model as a psycholinguistic-experiment participant.
SYSTEM_PROMPT = "You are a participant of a psycholinguistic experiment. You will do a task on English language use."
# Per-request user prompt prefix; intentionally empty for this task.
USER_PROMPT = ""