faisalhr1997 committed on
Commit
2b5b254
·
1 Parent(s): f3fb74e

Update app.py

Browse files
Files changed (1) hide show
  1. app.py +15 -15
app.py CHANGED
@@ -8,9 +8,9 @@ from huggingface_hub import hf_hub_download
8
 
9
  _ = """
10
  snapshot_download(
11
- repo_id="TheBloke/falcon-7b-instruct-GGML",
12
- allow_patterns="falcon7b-instruct.ggmlv3.q4_0.bin",
13
- revision="ggmlv3",
14
  local_dir="models",
15
  local_dir_use_symlinks=False, # default "auto"
16
  )
@@ -26,16 +26,16 @@ hf_hub_download(
26
 
27
  _ = """
28
  llm = AutoModelForCausalLM.from_pretrained(
29
- "TheBloke/falcon-7b-instruct-GGML",
30
- model_file="falcon7b-instruct.ggmlv3.q4_0.bin",
31
- model_type="falcon", gpu_layers=32, threads=2,
32
  )
33
  # """
34
- # _ = Path("models", "falcon7b-instruct.ggmlv3.q4_0.bin").absolute().as_posix()
35
  # assert Path(_).exists(), f"{_} does not exist, perhaps snapshot_download failed?"
36
 
37
- URL = "https://huggingface.co/TheBloke/falcon-7b-instruct-GGML/blob/main/falcon-7b-instruct.ggccv1.q4_1.bin"
38
- URL = "https://huggingface.co/TheBloke/falcon-7b-instruct-GGML/blob/ggmlv3/falcon7b-instruct.ggmlv3.q4_1.bin"
39
  repo_id = "/".join(urlparse(URL).path.strip("/").split("/")[:2])
40
 
41
 
@@ -43,7 +43,7 @@ model_file = Path(URL).name
43
 
44
  _ = hf_hub_download(
45
  repo_id=repo_id,
46
- revision="ggmlv3",
47
  filename=model_file,
48
  local_dir="models",
49
  # local_dir_use_symlinks=True,
@@ -54,14 +54,14 @@ llm = AutoModelForCausalLM.from_pretrained(
54
  # model_file=model_file,
55
  # model_file=_,
56
  _,
57
- model_type="falcon",
58
  threads=psutil.cpu_count(logical=False),
59
  )
60
 
61
- TITLE = f"""<h2 align="center">🦅 Falcon-Chat ggml ({model_file}) 🦅"""
62
  USER_NAME = "User"
63
- BOT_NAME = "Falcon"
64
- DEFAULT_INSTRUCTIONS = """The following is a conversation between a highly knowledgeable and intelligent AI assistant, called Falcon, and a human user, called User. In the following interactions, User and Falcon will converse in natural language, and Falcon will answer User's questions. Falcon was built to be respectful, polite and inclusive. Falcon was built by the Technology Innovation Institute in Abu Dhabi. Falcon will never decline to answer a question, and always attempts to give an answer that User would be satisfied with. It knows a lot, and always tells the truth. The conversation begins.
65
  """
66
  RETRY_COMMAND = "/retry"
67
  STOP_STR = f"\n{USER_NAME}:"
@@ -165,7 +165,7 @@ def chat():
165
  chat_history = chat_history + [[message, ""]]
166
  stream = llm(
167
  prompt,
168
- max_new_tokens=1024,
169
  stop=[STOP_STR, "<|endoftext|>"],
170
  temperature=temperature,
171
  top_p=top_p,
 
8
 
9
  _ = """
10
  snapshot_download(
11
+ repo_id="TheBloke/Luna-AI-Llama2-Uncensored-GGML",
12
+ allow_patterns="luna-ai-llama2-uncensored.ggmlv3.q2_K.bin",
13
+ revision="main",
14
  local_dir="models",
15
  local_dir_use_symlinks=False, # default "auto"
16
  )
 
26
 
27
  _ = """
28
  llm = AutoModelForCausalLM.from_pretrained(
29
+ "TheBloke/Luna-AI-Llama2-Uncensored-GGML",
30
+ model_file="luna-ai-llama2-uncensored.ggmlv3.q2_K.bin",
31
+ model_type="llama", gpu_layers=32, threads=2,
32
  )
33
  # """
34
+ # _ = Path("models", "luna-ai-llama2-uncensored.ggmlv3.q2_K.bin").absolute().as_posix()
35
  # assert Path(_).exists(), f"{_} does not exist, perhaps snapshot_download failed?"
36
 
37
+ # URL = "https://huggingface.co/TheBloke/falcon-7b-instruct-GGML/blob/main/falcon-7b-instruct.ggccv1.q4_1.bin"
38
+ URL = "https://huggingface.co/TheBloke/Luna-AI-Llama2-Uncensored-GGML/resolve/main/luna-ai-llama2-uncensored.ggmlv3.q2_K.bin"
39
  repo_id = "/".join(urlparse(URL).path.strip("/").split("/")[:2])
40
 
41
 
 
43
 
44
  _ = hf_hub_download(
45
  repo_id=repo_id,
46
+ revision="main",
47
  filename=model_file,
48
  local_dir="models",
49
  # local_dir_use_symlinks=True,
 
54
  # model_file=model_file,
55
  # model_file=_,
56
  _,
57
+ model_type="llama",
58
  threads=psutil.cpu_count(logical=False),
59
  )
60
 
61
+ TITLE = f"""<h2 align="center"> chat-ggml ({model_file})"""
62
  USER_NAME = "User"
63
+ BOT_NAME = "Assistant"
64
+ DEFAULT_INSTRUCTIONS = """The following is a conversation between a highly knowledgeable and intelligent AI assistant and a human User. In the following interactions, User and Assistant will converse and Assistant will answer User's questions.
65
  """
66
  RETRY_COMMAND = "/retry"
67
  STOP_STR = f"\n{USER_NAME}:"
 
165
  chat_history = chat_history + [[message, ""]]
166
  stream = llm(
167
  prompt,
168
+ max_new_tokens=2048,
169
  stop=[STOP_STR, "<|endoftext|>"],
170
  temperature=temperature,
171
  top_p=top_p,