Change models to CoEdIT
- README.md +2 -2
- build/m-quantized.js +14 -14
- build/m-quantized_bg.wasm +2 -2
- build/m.js +1 -1
- build/m_bg.wasm +2 -2
- index.html +15 -27
- utils.js +44 -81
README.md CHANGED
@@ -1,5 +1,5 @@
 ---
-title: Candle
+title: Candle CoEdIT Wasm
 emoji: 🕯️🔡
 colorFrom: blue
 colorTo: purple
@@ -7,4 +7,4 @@ sdk: static
 pinned: false
 ---
 
-
+A space to demo [jbochi/candle-coedit-quantized](https://huggingface.co/jbochi/candle-coedit-quantized/tree/main).
build/m-quantized.js CHANGED
@@ -97,6 +97,15 @@ function takeObject(idx) {
     return ret;
 }
 
+const cachedTextDecoder = (typeof TextDecoder !== 'undefined' ? new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }) : { decode: () => { throw Error('TextDecoder not available') } } );
+
+if (typeof TextDecoder !== 'undefined') { cachedTextDecoder.decode(); };
+
+function getStringFromWasm0(ptr, len) {
+    ptr = ptr >>> 0;
+    return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
+}
+
 function addHeapObject(obj) {
     if (heap_next === heap.length) heap.push(heap.length + 1);
     const idx = heap_next;
@@ -115,15 +124,6 @@ function getFloat64Memory0() {
     return cachedFloat64Memory0;
 }
 
-const cachedTextDecoder = (typeof TextDecoder !== 'undefined' ? new TextDecoder('utf-8', { ignoreBOM: true, fatal: true }) : { decode: () => { throw Error('TextDecoder not available') } } );
-
-if (typeof TextDecoder !== 'undefined') { cachedTextDecoder.decode(); };
-
-function getStringFromWasm0(ptr, len) {
-    ptr = ptr >>> 0;
-    return cachedTextDecoder.decode(getUint8Memory0().subarray(ptr, ptr + len));
-}
-
 let cachedBigInt64Memory0 = null;
 
 function getBigInt64Memory0() {
@@ -396,6 +396,10 @@ function __wbg_get_imports() {
     imports.wbg.__wbindgen_object_drop_ref = function(arg0) {
         takeObject(arg0);
     };
+    imports.wbg.__wbindgen_error_new = function(arg0, arg1) {
+        const ret = new Error(getStringFromWasm0(arg0, arg1));
+        return addHeapObject(ret);
+    };
     imports.wbg.__wbindgen_is_bigint = function(arg0) {
         const ret = typeof(getObject(arg0)) === 'bigint';
         return ret;
@@ -432,10 +436,6 @@ function __wbg_get_imports() {
         getFloat64Memory0()[arg0 / 8 + 1] = isLikeNone(ret) ? 0 : ret;
         getInt32Memory0()[arg0 / 4 + 0] = !isLikeNone(ret);
     };
-    imports.wbg.__wbindgen_error_new = function(arg0, arg1) {
-        const ret = new Error(getStringFromWasm0(arg0, arg1));
-        return addHeapObject(ret);
-    };
     imports.wbg.__wbindgen_object_clone_ref = function(arg0) {
         const ret = getObject(arg0);
         return addHeapObject(ret);
@@ -488,7 +488,7 @@ function __wbg_get_imports() {
             wasm.__wbindgen_free(deferred0_0, deferred0_1, 1);
         }
     };
-    imports.wbg.
+    imports.wbg.__wbg_log_d03200ce29166fbd = function(arg0, arg1) {
         console.log(getStringFromWasm0(arg0, arg1));
     };
     imports.wbg.__wbg_crypto_c48a774b022d20ac = function(arg0) {
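The block moved above addHeapObject is the standard wasm-bindgen string glue: getStringFromWasm0 reads a (ptr, len) pair out of the module's linear memory and decodes it as UTF-8, and both the relocated __wbindgen_error_new import and the renamed __wbg_log_* import rely on it. A minimal standalone sketch of that pattern (the memory, offset, and string below are made up for illustration, not taken from the generated file):

// Standalone sketch of the (ptr, len) -> JS string decoding used by getStringFromWasm0.
// Here the memory is filled by hand; in the real module the wasm side writes the bytes.
const memory = new WebAssembly.Memory({ initial: 1 }); // one 64 KiB page

const text = new TextEncoder().encode("hello from wasm");
const ptr = 16;                                   // illustrative offset
new Uint8Array(memory.buffer).set(text, ptr);
const len = text.length;

const decoder = new TextDecoder("utf-8", { ignoreBOM: true, fatal: true });
function getString(ptr, len) {
  ptr = ptr >>> 0;
  return decoder.decode(new Uint8Array(memory.buffer).subarray(ptr, ptr + len));
}

console.log(getString(ptr, len)); // "hello from wasm"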
build/m-quantized_bg.wasm CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:8176b82eaaf7aeadf548b220b0b2f7a897f2edd3d91fb83328995c6fe8dd468b
+size 4282007
build/m.js CHANGED
@@ -488,7 +488,7 @@ function __wbg_get_imports() {
             wasm.__wbindgen_free(deferred0_0, deferred0_1, 1);
         }
     };
-    imports.wbg.
+    imports.wbg.__wbg_log_d03200ce29166fbd = function(arg0, arg1) {
         console.log(getStringFromWasm0(arg0, arg1));
     };
     imports.wbg.__wbg_crypto_c48a774b022d20ac = function(arg0) {
build/m_bg.wasm CHANGED
@@ -1,3 +1,3 @@
 version https://git-lfs.github.com/spec/v1
-oid sha256:
-size
+oid sha256:172f4f5387cf311a8ec0f2391b65767b7a0b81001b6fb0c572a6999dc78744c5
+size 4124838
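Both .wasm entries are Git LFS pointer files: the oid line carries the SHA-256 of the actual binary and the size line its byte count, while the binary itself is stored in LFS. If you want to check a downloaded artifact against the pointer, a sketch using the Web Crypto API (the fetch URL is a placeholder, not part of this commit):

// Sketch: verify a downloaded blob against the sha256 oid and size recorded in an LFS pointer.
async function sha256Hex(buffer) {
  const digest = await crypto.subtle.digest("SHA-256", buffer);
  return [...new Uint8Array(digest)].map((b) => b.toString(16).padStart(2, "0")).join("");
}

(async () => {
  const resp = await fetch("https://example.com/path/to/m_bg.wasm"); // placeholder URL
  const data = await resp.arrayBuffer();
  console.log(data.byteLength === 4124838); // pointer "size"
  console.log((await sha256Hex(data)) ===
    "172f4f5387cf311a8ec0f2391b65767b7a0b81001b6fb0c572a6999dc78744c5"); // pointer "oid"
})();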
index.html CHANGED
@@ -1,7 +1,7 @@
 <html>
   <head>
     <meta content="text/html;charset=utf-8" http-equiv="Content-Type" />
-    <title>Candle
+    <title>Grammar correction in the browser using CoEdIT and Candle</title>
   </head>
 
   <body></body>
@@ -25,7 +25,6 @@
       @apply underline hover:text-blue-500 hover:no-underline;
     }
   </style>
-    <script src="https://cdnjs.cloudflare.com/ajax/libs/iframe-resizer/4.3.1/iframeResizer.contentWindow.min.js"></script>
     <script src="https://cdn.tailwindcss.com"></script>
     <script type="module">
     import {
@@ -136,14 +135,14 @@
     <main class="grid grid-cols-1 gap-8 relative">
       <span class="absolute text-5xl -ml-[1em]"> 🕯️ </span>
       <div>
-        <h1 class="text-5xl font-bold">
-        <h2 class="text-2xl font-bold">Rust/WASM Demo</h2>
+        <h1 class="text-5xl font-bold">Grammar correction in the browser using CoEdIT and Candle.</h1>
+        <h2 class="text-2xl font-bold">CoEdIT Rust/WASM Demo</h2>
         <p class="max-w-lg">
-          This demo
-            href="https://
+          This demo showcases (<a
+            href="https://github.com/vipulraheja/coedit"
             target="_blank"
             class="link"
-            >
+            >CoEdIT</a
           >) models right in your browser, thanks to
           <a
             href="https://github.com/huggingface/candle/"
@@ -151,29 +150,18 @@
             class="link">
             Candle
           </a>
-          ML framework and rust/wasm.
-          models, including
+          ML framework and rust/wasm. The models are loaded from huggingface
           <a
-            href="https://huggingface.co/
+            href="https://huggingface.co/jbochi/candle-coedit-quantized"
             target="_blank"
             class="link">
-
-
-
-
-          >,
-          <a
-            href="https://huggingface.co/google/flan-t5-small"
-            target="_blank"
-            class="link"
-            >flan-t5-small</a
-          >
-          and several t5
-          <a
-            href="https://huggingface.co/lmz/candle-quantized-t5/tree/main"
+            jbochi/candle-coedit-quantized</a
+          >.
+          This space is a fork of <a
+            href="https://huggingface.co/spaces/radames/Candle-T5-Generation-Wasm"
             target="_blank"
             class="link">
-
+            radames/Candle-T5-Generation-Wasm</a
           >.
         </p>
       </div>
@@ -197,8 +185,8 @@
             type="text"
             id="prompt"
             class="font-light w-full px-3 py-2 mx-1 resize-none outline-none"
-            placeholder="Add prompt here, e.g. '
-            value="
+            placeholder="Add prompt here, e.g. 'Fix the grammar: When I grow up, I start to understand what he said is quite right.'"
+            value="Fix the grammar: When I grow up, I start to understand what he said is quite right." />
           <button
             class="bg-gray-700 hover:bg-gray-800 text-white font-normal py-2 w-16 rounded disabled:bg-gray-300 disabled:cursor-not-allowed">
             Run
utils.js CHANGED
@@ -65,98 +65,61 @@ export async function generateText(
     worker.addEventListener("message", messageHandler);
   });
 }
+
+const TASKS = {
+  fluency: {
+    prefix: "Fix the grammar: ",
+    max_length: 300,
+  },
+  coherence: {
+    prefix: "Rewrite to make this easier to understand: ",
+    max_length: 300,
+  },
+  simplification: {
+    prefix: "translate English to Romanian: ",
+    max_length: 300,
+  },
+  simplification: {
+    prefix: "Paraphrase this: ",
+    max_length: 300,
+  },
+  formalization: {
+    prefix: "Write this more formally: ",
+    max_length: 300,
+  },
+  neutralize: {
+    prefix: "Write in a more neutral way: ",
+    max_length: 300,
+  },
+};
+
 export const MODELS = {
-
-    size: "
-    base_url: "https://huggingface.co/
+  coedit_large_quantized: {
+    size: "643 MB",
+    base_url: "https://huggingface.co/jbochi/candle-coedit-quantized/resolve/main/",
     model: "model.gguf",
     tokenizer: "tokenizer.json",
     config: "config.json",
-    tasks:
-      translation_en_to_de: {
-        prefix: "translate English to German: ",
-        max_length: 300,
-      },
-      translation_en_to_fr: {
-        prefix: "translate English to French: ",
-        max_length: 300,
-      },
-      translation_en_to_ro: {
-        prefix: "translate English to Romanian: ",
-        max_length: 300,
-      },
-      summarization: { prefix: "summarize: ", max_length: 200 },
-    },
+    tasks: TASKS,
   },
-
-    size: "
-    base_url: "https://huggingface.co/
-    model: "model.
+  coedit_xl_quantized: {
+    size: "2.34 GB",
+    base_url: "https://huggingface.co/jbochi/candle-coedit-quantized/resolve/main/",
+    model: "model-xl.gguf",
     tokenizer: "tokenizer.json",
-    config: "config.json",
-    tasks:
-      translation_en_to_de: {
-        prefix: "translate English to German: ",
-        max_length: 300,
-      },
-      translation_en_to_fr: {
-        prefix: "translate English to French: ",
-        max_length: 300,
-      },
-      translation_en_to_ro: {
-        prefix: "translate English to Romanian: ",
-        max_length: 300,
-      },
-      summarization: { prefix: "summarize: ", max_length: 200 },
-    },
+    config: "config-xl.json",
+    tasks: TASKS,
   },
-
-    size: "
-    base_url:
-      "https://huggingface.co/google/flan-t5-small/resolve/refs%2Fpr%2F14/",
+  coedit_large: {
+    size: "3.13 GB",
+    base_url: "https://huggingface.co/grammarly/coedit-large/resolve/main/",
     model: "model.safetensors",
     tokenizer: "tokenizer.json",
     config: "config.json",
-    tasks:
-      translation_en_to_de: {
-        prefix: "translate English to German: ",
-        max_length: 300,
-      },
-      translation_en_to_fr: {
-        prefix: "translate English to French: ",
-        max_length: 300,
-      },
-      translation_en_to_ro: {
-        prefix: "translate English to Romanian: ",
-        max_length: 300,
-      },
-      summarization: { prefix: "summarize: ", max_length: 200 },
-    },
-  },
-
-  flan_t5_base_quantized: {
-    size: "263 MB",
-    base_url: "https://huggingface.co/lmz/candle-quantized-t5/resolve/main/",
-    model: "model-flan-t5-base.gguf",
-    tokenizer: "tokenizer.json",
-    config: "config-flan-t5-base.json",
-    tasks: {
-      translation_en_to_de: {
-        prefix: "translate English to German: ",
-        max_length: 300,
-      },
-      translation_en_to_fr: {
-        prefix: "translate English to French: ",
-        max_length: 300,
-      },
-      translation_en_to_ro: {
-        prefix: "translate English to Romanian: ",
-        max_length: 300,
-      },
-      summarization: { prefix: "summarize: ", max_length: 200 },
-    },
+    tasks: TASKS,
   },
 };
+
 export function getModelInfo(id, taskID) {
   const model = MODELS[id];
   return {
@@ -165,4 +128,4 @@ export function getModelInfo(id, taskID) {
     tokenizerURL: model.base_url + model.tokenizer,
     maxLength: model.tasks[taskID].max_length,
   };
-}
+};
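For orientation, this is roughly how the new MODELS/TASKS tables are consumed, using only fields visible in this diff; the final prompt-assembly line (prefix plus user text) is an assumption about how the prefixes are applied, since that code is not part of this change:

// Sketch: resolving one of the new CoEdIT entries via getModelInfo (fields shown in this diff).
import { MODELS, getModelInfo } from "./utils.js";

const id = "coedit_large_quantized";
const taskID = "fluency";

const { tokenizerURL, maxLength } = getModelInfo(id, taskID);
// tokenizerURL -> "https://huggingface.co/jbochi/candle-coedit-quantized/resolve/main/tokenizer.json"
// maxLength    -> 300

// Assumed usage: the task prefix is prepended to the user's raw input before generation.
const prefix = MODELS[id].tasks[taskID].prefix; // "Fix the grammar: "
const prompt = prefix + "When I grow up, I start to understand what he said is quite right.";
console.log(prompt);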