File size: 1,122 Bytes
d5436e0
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
1
2
3
4
5
6
7
8
9
10
11
12
13
14
15
16
17
18
19
20
21
22
23
24
25
26
27
28
29
30
31
32
33
34
35
# Generation settings for each supported model backend, keyed by backend id.
# "HF_*" entries configure Hugging Face transformers models; "LC_*" entries
# point at downloadable GGUF files for local (llama.cpp-style) inference.
# NOTE(review): "HF_Mistrail" looks like a typo of "Mistral", but other
# modules may look this key up by its exact spelling — do not rename here.
config = {
    "HF_Mistrail": dict(
        model="mistralai/Mixtral-8x7B-Instruct-v0.1",
        temperature=0.1,
        max_new_tokens=1024,
        top_k=5,
        load_in_8bit=True,  # presumably requires bitsandbytes — confirm at load site
    ),
    "HF_TinyLlama": dict(
        model="TinyLlama/TinyLlama-1.1B-Chat-v1.0",
        temperature=0.1,
        max_new_tokens=1024,
        top_k=5,
        top_p=0.95,
        load_in_8bit=True,
        do_sample=True,  # sampling enabled despite the low temperature
    ),
    "LC_TinyLlama-1.1B-Chat-v1.0-GGUF": dict(
        model_url="https://huggingface.co/TheBloke/TinyLlama-1.1B-Chat-v1.0-GGUF/resolve/main/tinyllama-1.1b-chat-v1.0.Q8_0.gguf",
        model_name="tinyllama-1.1b-chat-v1.0.Q8_0.gguf.bin",  # local filename the download is saved as
        temperature=0.4,
        max_tokens=868,
        top_p=0.8,
        top_k=5,
    ),
    "LC_Phi-3-mini-4k-instruct-gguf": dict(
        model_url="https://huggingface.co/microsoft/Phi-3-mini-4k-instruct-gguf/resolve/main/Phi-3-mini-4k-instruct-q4.gguf",
        model_name="Phi-3-mini-4k-instruct-gguf.bin",  # NOTE(review): URL is the q4 quant; local name omits "q4" — verify loader expectations
        temperature=0.4,
        max_tokens=868,
        top_p=0.8,
        top_k=5,
    ),
}