asiansoul committed on
Commit 453ef6d · verified · 1 Parent(s): 515fcdd

Upload folder using huggingface_hub

.gitattributes CHANGED
@@ -33,3 +33,8 @@ saved_model/**/* filter=lfs diff=lfs merge=lfs -text
  *.zip filter=lfs diff=lfs merge=lfs -text
  *.zst filter=lfs diff=lfs merge=lfs -text
  *tfevents* filter=lfs diff=lfs merge=lfs -text
+ llama-3.2-1b-instruct-Q3_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ llama-3.2-1b-instruct-Q4_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ llama-3.2-1b-instruct-Q5_K_M.gguf filter=lfs diff=lfs merge=lfs -text
+ llama-3.2-1b-instruct-Q6_K.gguf filter=lfs diff=lfs merge=lfs -text
+ llama-3.2-1b-instruct-Q8_0.gguf filter=lfs diff=lfs merge=lfs -text
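
These new .gitattributes entries mark each GGUF binary as a Git LFS object, so the repository stores lightweight pointers instead of multi-hundred-megabyte files. For reference, these are the same lines git-lfs would append itself; a minimal sketch, assuming git-lfs is installed in a local clone (the file name is just one of the five added here):

    git lfs install
    git lfs track "llama-3.2-1b-instruct-Q8_0.gguf"   # appends the filter=lfs diff=lfs merge=lfs -text line to .gitattributes
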
Modelfile_1b ADDED
@@ -0,0 +1,45 @@
+ FROM llama-3.2-1b-instruct-Q8_0.gguf
+ TEMPLATE """<|start_header_id|>system<|end_header_id|>
+
+ Cutting Knowledge Date: December 2023
+
+ {{ if .System }}{{ .System }}
+ {{- end }}
+ {{- if .Tools }}When you receive a tool call response, use the output to format an answer to the original user question.
+
+ You are a helpful assistant with tool calling capabilities.
+ {{- end }}<|eot_id|>
+ {{- range $i, $_ := .Messages }}
+ {{- $last := eq (len (slice $.Messages $i)) 1 }}
+ {{- if eq .Role "user" }}<|start_header_id|>user<|end_header_id|>
+ {{- if and $.Tools $last }}
+
+ Given the following functions, please respond with a JSON for a function call with its proper arguments that best answers the given prompt.
+
+ Respond in the format {"name": function name, "parameters": dictionary of argument name and its value}. Do not use variables.
+
+ {{ range $.Tools }}
+ {{- . }}
+ {{ end }}
+ {{ .Content }}<|eot_id|>
+ {{- else }}
+
+ {{ .Content }}<|eot_id|>
+ {{- end }}{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
+
+ {{ end }}
+ {{- else if eq .Role "assistant" }}<|start_header_id|>assistant<|end_header_id|>
+ {{- if .ToolCalls }}
+ {{ range .ToolCalls }}
+ {"name": "{{ .Function.Name }}", "parameters": {{ .Function.Arguments }}}{{ end }}
+ {{- else }}
+
+ {{ .Content }}
+ {{- end }}{{ if not $last }}<|eot_id|>{{ end }}
+ {{- else if eq .Role "tool" }}<|start_header_id|>ipython<|end_header_id|>
+
+ {{ .Content }}<|eot_id|>{{ if $last }}<|start_header_id|>assistant<|end_header_id|>
+
+ {{ end }}
+ {{- end }}
+ {{- end }}"""
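
Modelfile_1b is an Ollama Modelfile: FROM points at the Q8_0 GGUF and TEMPLATE supplies the Llama 3.2 chat and tool-calling prompt format as a Go template over .Messages, .Tools, and .ToolCalls. A minimal usage sketch, assuming Ollama is installed and this repository is cloned with the GGUF files materialized locally (the model name llama3.2-1b-local is arbitrary):

    ollama create llama3.2-1b-local -f Modelfile_1b
    ollama run llama3.2-1b-local "Summarize what tool calling is in one sentence."

Pointing FROM at one of the other quantizations added below (e.g. llama-3.2-1b-instruct-Q4_K_M.gguf) would trade some quality for a smaller memory footprint.
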
llama-3.2-1b-instruct-Q3_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:936bd6bd618fd17ee1e4d30bb3f7d0aa949347a8dace75fee9c0a1c935172fa9
+ size 690843488
llama-3.2-1b-instruct-Q4_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:518b776b208afb7562fae83b13f8564ff439a4d5979fb88e384fbfda7d7622e8
+ size 807694176
llama-3.2-1b-instruct-Q5_K_M.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:127e8442ad12787e2f78b30cac012469f5a7d00c42b7b58890c01045529849d4
+ size 911503200
llama-3.2-1b-instruct-Q6_K.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:b4ad5ee46b34eb7ffeeb24bdadbd82311584ba7cb17b732b21cd7f78344ce1b0
+ size 1021800288
llama-3.2-1b-instruct-Q8_0.gguf ADDED
@@ -0,0 +1,3 @@
+ version https://git-lfs.github.com/spec/v1
+ oid sha256:63160d8286181a905ef83d5877dc476c8df337ca6a5580fe999f14663f79cc88
+ size 1321082720
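
Each of the five GGUF entries above is committed as a Git LFS pointer (spec version, sha256 oid, and byte size) rather than the weights themselves. A minimal retrieval sketch, assuming git-lfs is installed; <repo-url> is a placeholder for this repository's clone URL:

    git clone <repo-url> && cd <repo-dir>
    git lfs pull                                  # fetch the actual GGUF binaries
    sha256sum llama-3.2-1b-instruct-Q8_0.gguf     # should match the oid recorded above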