neetnestor committed on
Commit
bba96cd
1 Parent(s): 484e5a2

feat: update design for Phi-3.5 specifically

Browse files
Files changed (7) hide show
  1. dist/index.js +0 -0
  2. index.html +106 -16
  3. package-lock.json +0 -0
  4. package.json +1 -1
  5. src/index.js +17 -15
  6. styles/style.css +31 -19
  7. yarn.lock +0 -0
dist/index.js CHANGED
The diff for this file is too large to render. See raw diff
 
index.html CHANGED
@@ -3,31 +3,121 @@
3
  <head>
4
  <meta charset="utf-8" />
5
  <meta name="viewport" content="width=device-width" />
6
- <title>WebLLM Simple Chat Space</title>
7
  <link rel="stylesheet" href="styles/katex.min.css" />
8
- <link rel="stylesheet" href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css">
 
 
 
9
  <link rel="stylesheet" href="styles/style.css" />
10
  </head>
11
 
12
  <body>
13
  <main>
14
- <h1>WebLLM Simple Chat Space</h1>
15
  <p>
16
- This is a minimal demo app showcasing how WebLLM enables AI chat
17
- directly in your local browser. You might also enjoy exploring
18
-
19
- <a
20
- href="https://chat.webllm.ai/"
21
- rel="nofollow noopener noreferrer"
22
- target="_blank"
23
- style="display: inline;"
24
- >WebLLM Chat</a
25
- >!
26
  </p>
27
 
28
- <h2>Step 1: Initialize WebLLM and Download Model</h2>
29
- <div class="download-container">
30
- <select id="model-selection"></select>
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
31
  <button id="download">Download</button>
32
  </div>
33
  <p id="download-status" class="hidden"></p>
 
3
  <head>
4
  <meta charset="utf-8" />
5
  <meta name="viewport" content="width=device-width" />
6
+ <title>WebLLM Phi 3.5 Chat</title>
7
  <link rel="stylesheet" href="styles/katex.min.css" />
8
+ <link
9
+ rel="stylesheet"
10
+ href="https://cdnjs.cloudflare.com/ajax/libs/highlight.js/11.9.0/styles/default.min.css"
11
+ />
12
  <link rel="stylesheet" href="styles/style.css" />
13
  </head>
14
 
15
  <body>
16
  <main>
17
+ <h1>WebLLM Phi 3.5 Chat</h1>
18
  <p>
19
+ This space enables AI chat with Phi 3.5 models directly in your local
20
+ browser, empowered by WebLLM.
 
 
 
 
 
 
 
 
21
  </p>
22
 
23
+ <h2>Step 1: Configure And Download Model</h2>
24
+ <div class="card vertical">
25
+ <form class="configure-form">
26
+ <!-- Quantization -->
27
+ <div class="form-group">
28
+ <label for="quantization">Quantization:</label>
29
+ <select id="quantization" name="quantization">
30
+ <option value="q4f16">q4f16</option>
31
+ <option value="q4f32">q4f32</option>
32
+ </select>
33
+ </div>
34
+
35
+ <!-- Context Window -->
36
+ <div class="form-group">
37
+ <label for="context">Context Window:</label>
38
+ <select id="context" name="context">
39
+ <option value="1k">1024</option>
40
+ <option value="">4096</option>
41
+ </select>
42
+ </div>
43
+
44
+ <!-- Temperature -->
45
+ <div class="form-group">
46
+ <label for="temperature"
47
+ >Temperature:
48
+ <span id="temperature-value" class="range-value">1.00</span></label
49
+ >
50
+ <input
51
+ type="range"
52
+ id="temperature"
53
+ name="temperature"
54
+ min="0.0"
55
+ max="1.0"
56
+ step="0.01"
57
+ value="1.0"
58
+ oninput="document.getElementById('temperature-value').textContent = Number(this.value).toFixed(2)"
59
+ />
60
+ </div>
61
+
62
+ <!-- Top-p -->
63
+ <div class="form-group">
64
+ <label for="top_p"
65
+ >Top-p:
66
+ <span id="top_p-value" class="range-value">0.90</span></label
67
+ >
68
+ <input
69
+ type="range"
70
+ id="top_p"
71
+ name="top_p"
72
+ min="0.0"
73
+ max="1.0"
74
+ step="0.01"
75
+ value="0.0"
76
+ oninput="document.getElementById('top_p-value').textContent = Number(this.value).toFixed(2)"
77
+ />
78
+ </div>
79
+
80
+ <!-- Presence Penalty -->
81
+ <div class="form-group">
82
+ <label for="presence_penalty"
83
+ >Presence Penalty:
84
+ <span id="presence_penalty-value" class="range-value"
85
+ >0.00</span
86
+ ></label
87
+ >
88
+ <input
89
+ type="range"
90
+ id="presence_penalty"
91
+ name="presence_penalty"
92
+ min="0.0"
93
+ max="1.0"
94
+ step="0.01"
95
+ value="0.0"
96
+ oninput="document.getElementById('presence_penalty-value').textContent = Number(this.value).toFixed(2)"
97
+ />
98
+ </div>
99
+
100
+ <!-- Frequency Penalty -->
101
+ <div class="form-group">
102
+ <label for="frequency_penalty"
103
+ >Frequency Penalty:
104
+ <span id="frequency_penalty-value" class="range-value"
105
+ >0.00</span
106
+ ></label
107
+ >
108
+ <input
109
+ type="range"
110
+ id="frequency_penalty"
111
+ name="frequency_penalty"
112
+ min="0.0"
113
+ max="1.0"
114
+ step="0.01"
115
+ value="0.0"
116
+ oninput="document.getElementById('frequency_penalty-value').textContent = Number(this.value).toFixed(2)"
117
+ />
118
+ </div>
119
+ </form>
120
+
121
  <button id="download">Download</button>
122
  </div>
123
  <p id="download-status" class="hidden"></p>
package-lock.json ADDED
The diff for this file is too large to render. See raw diff
 
package.json CHANGED
@@ -8,7 +8,7 @@
8
  "dependencies": {
9
  "@babel/core": "^7.13.15",
10
  "@babel/preset-env": "^7.13.15",
11
- "@mlc-ai/web-llm": "^0.2.56",
12
  "katex": "^0.16.11",
13
  "rehype-highlight": "^7.0.0",
14
  "rehype-katex": "^7.0.0",
 
8
  "dependencies": {
9
  "@babel/core": "^7.13.15",
10
  "@babel/preset-env": "^7.13.15",
11
+ "@mlc-ai/web-llm": "^0.2.62",
12
  "katex": "^0.16.11",
13
  "rehype-highlight": "^7.0.0",
14
  "rehype-katex": "^7.0.0",
src/index.js CHANGED
@@ -31,11 +31,6 @@ const messages = [
31
  },
32
  ];
33
 
34
- const availableModels = webllm.prebuiltAppConfig.model_list.map(
35
- (m) => m.model_id
36
- );
37
- let selectedModel = "Llama-3.1-8B-Instruct-q4f32_1-MLC-1k";
38
-
39
  // Callback function for initializing progress
40
  function updateEngineInitProgressCallback(report) {
41
  console.log("initialize", report.progress);
@@ -48,12 +43,26 @@ const engine = new webllm.MLCEngine();
48
  engine.setInitProgressCallback(updateEngineInitProgressCallback);
49
 
50
  async function initializeWebLLMEngine() {
 
 
 
 
 
 
 
51
  document.getElementById("download-status").classList.remove("hidden");
52
- selectedModel = document.getElementById("model-selection").value;
 
 
 
53
  const config = {
54
- temperature: 1.0,
55
- top_p: 1,
 
 
56
  };
 
 
57
  await engine.reload(selectedModel, config);
58
  modelLoaded = true;
59
  }
@@ -166,13 +175,6 @@ async function updateLastMessage(content) {
166
  }
167
 
168
  /*************** UI binding ***************/
169
- availableModels.forEach((modelId) => {
170
- const option = document.createElement("option");
171
- option.value = modelId;
172
- option.textContent = modelId;
173
- document.getElementById("model-selection").appendChild(option);
174
- });
175
- document.getElementById("model-selection").value = selectedModel;
176
  document.getElementById("download").addEventListener("click", function () {
177
  initializeWebLLMEngine().then(() => {
178
  document.getElementById("send").disabled = false;
 
31
  },
32
  ];
33
 
 
 
 
 
 
34
  // Callback function for initializing progress
35
  function updateEngineInitProgressCallback(report) {
36
  console.log("initialize", report.progress);
 
43
  engine.setInitProgressCallback(updateEngineInitProgressCallback);
44
 
45
  async function initializeWebLLMEngine() {
46
+ const quantization = document.getElementById("quantization").value;
47
+ const context_window = document.getElementById("context").value;
48
+ const temperature = document.getElementById("temperature").value;
49
+ const top_p = document.getElementById("top_p").value;
50
+ const presence_penalty = document.getElementById("presence_penalty").value;
51
+ const frequency_penalty = document.getElementById("frequency_penalty").value;
52
+
53
  document.getElementById("download-status").classList.remove("hidden");
54
+ let selectedModel = `Phi-3.5-mini-instruct-${quantization}_1-MLC`;
55
+ if (context_window) {
56
+ selectedModel += "-" + context_window;
57
+ }
58
  const config = {
59
+ temperature,
60
+ top_p,
61
+ frequency_penalty,
62
+ presence_penalty,
63
  };
64
+ console.log(`Loading Model: ${selectedModel}`);
65
+ console.log(`Config: ${Object.toString(config)}`);
66
  await engine.reload(selectedModel, config);
67
  modelLoaded = true;
68
  }
 
175
  }
176
 
177
  /*************** UI binding ***************/
 
 
 
 
 
 
 
178
  document.getElementById("download").addEventListener("click", function () {
179
  initializeWebLLMEngine().then(() => {
180
  document.getElementById("send").disabled = false;
styles/style.css CHANGED
@@ -12,29 +12,18 @@ h1 {
12
  }
13
 
14
  h2 {
15
- font-size: 1rem;
16
  }
17
 
18
  main {
19
  padding: 2rem;
20
- max-width: 48rem;
21
- height: max(calc(100dvh - 4rem), 800px);
22
  margin-left: auto;
23
  margin-right: auto;
24
  display: flex;
25
  flex-direction: column;
26
  }
27
 
28
- .download-container {
29
- display: flex;
30
- justify-content: space-between;
31
- margin-bottom: 20px;
32
- }
33
-
34
- .download-container select {
35
- padding: .5rem;
36
- }
37
-
38
  #download-status {
39
  border: solid 1px black;
40
  box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1),
@@ -43,7 +32,7 @@ main {
43
  }
44
 
45
  .chat-container {
46
- height: 400px;
47
  width: 100%;
48
  border: 2px solid black;
49
  display: flex;
@@ -177,18 +166,41 @@ button:hover:not(:disabled) {
177
  color: rgb(0, 0, 0);
178
  }
179
 
 
 
 
 
180
  .message pre code.hljs {
181
  background: #ffffff;
182
  border-radius: 10px;
183
  }
184
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
 
185
  @media only screen and (max-width: 600px) {
186
  .card {
187
  flex-direction: column;
188
  }
189
-
190
- .download-container {
191
- flex-direction: column;
192
- gap: 1rem;
193
- }
194
  }
 
12
  }
13
 
14
  h2 {
15
+ font-size: 1.2rem;
16
  }
17
 
18
  main {
19
  padding: 2rem;
20
+ max-width: 70rem;
 
21
  margin-left: auto;
22
  margin-right: auto;
23
  display: flex;
24
  flex-direction: column;
25
  }
26
 
 
 
 
 
 
 
 
 
 
 
27
  #download-status {
28
  border: solid 1px black;
29
  box-shadow: 0 10px 15px -3px rgba(0, 0, 0, 0.1),
 
32
  }
33
 
34
  .chat-container {
35
+ height: calc(100dvh - 100px);
36
  width: 100%;
37
  border: 2px solid black;
38
  display: flex;
 
166
  color: rgb(0, 0, 0);
167
  }
168
 
169
+ .card.vertical {
170
+ flex-direction: column;
171
+ }
172
+
173
  .message pre code.hljs {
174
  background: #ffffff;
175
  border-radius: 10px;
176
  }
177
 
178
+ .configure-form {
179
+ display: flex;
180
+ gap: 1.5rem;
181
+ flex-wrap: wrap;
182
+ justify-content: center;
183
+ }
184
+ .configure-form .form-group {
185
+ margin-bottom: 15px;
186
+ }
187
+ .configure-form label {
188
+ display: block;
189
+ margin-bottom: 5px;
190
+ font-weight: bold;
191
+ }
192
+ .configure-form input[type="range"] {
193
+ width: 100%;
194
+ }
195
+ .configure-form select, .configure-form input[type="range"], .configure-form input[type="number"] {
196
+ padding: 5px;
197
+ font-size: 1rem;
198
+ width: 100%;
199
+ box-sizing: border-box;
200
+ }
201
+
202
  @media only screen and (max-width: 600px) {
203
  .card {
204
  flex-direction: column;
205
  }
 
 
 
 
 
206
  }
yarn.lock CHANGED
The diff for this file is too large to render. See raw diff