eduardmtz committed on
Commit 318ad79 · verified · 1 Parent(s): 30ff09c

Create preguntar-pdf.html

Files changed (1)
  1. preguntar-pdf.html +69 -0
preguntar-pdf.html ADDED
@@ -0,0 +1,69 @@
+ <!DOCTYPE html>
+ <html lang="en">
+ <head>
+     <meta charset="UTF-8">
+     <meta name="viewport" content="width=device-width, initial-scale=1.0">
+     <title>Ask Questions to the Model</title>
+     <script src="https://cdn.jsdelivr.net/npm/@tensorflow/tfjs"></script>
+ </head>
+ <body>
+     <h1>Ask Questions to the Model</h1>
+     <input type="text" id="question" placeholder="Type your question here">
+     <button id="askQuestion">Ask</button>
+
+     <pre id="response"></pre>
+
+     <script>
+         async function loadModel() {
+             try {
+                 const model = await tf.loadLayersModel('localstorage://pdf-trained-model');
+                 return model;
+             } catch (err) {
+                 document.getElementById('response').textContent = 'Model not found. Train it first!';
+                 throw err;
+             }
+         }
+
+         function tokenizeQuestion(question, tokenizer) {
+             const tokens = question.trim().split(/\s+/).filter(Boolean);
+             return tokens.map(token => tokenizer[token] || 0); // 0 = out-of-vocabulary
+         }
+
+         document.getElementById('askQuestion').addEventListener('click', async () => {
+             const question = document.getElementById('question').value;
+             const responseElement = document.getElementById('response');
+
+             if (!question) {
+                 responseElement.textContent = 'Please enter a question.';
+                 return;
+             }
+
+             responseElement.textContent = 'Loading model...';
+
+             try {
+                 const model = await loadModel();
+
+                 // Tokenizer setup (replace with the actual tokenizer used during training)
+                 const tokenizer = { "example": 1, "question": 2 }; // Placeholder for the real word index
+
+                 const input = tokenizeQuestion(question, tokenizer);
+
+                 if (input.length === 0) {
+                     responseElement.textContent = 'Error: Question could not be tokenized.';
+                     return;
+                 }
+
+                 const paddedInput = tf.pad(tf.tensor2d([input.slice(0, 10)], [1, Math.min(input.length, 10)]), [[0, 0], [0, Math.max(0, 10 - input.length)]]); // pad/truncate to the fixed length of 10
+
+                 const prediction = model.predict(paddedInput);
+                 const predictionArray = await prediction.array();
+
+                 responseElement.textContent = `Model response: ${JSON.stringify(predictionArray)}`;
+             } catch (err) {
+                 responseElement.textContent = 'Error: Could not load model or process question.';
+                 console.error(err);
+             }
+         });
+     </script>
+ </body>
+ </html>
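
Note: this page only works if a companion training page has already saved a model under the `localstorage://pdf-trained-model` key and exposes the same word index that `tokenizeQuestion` expects. A minimal sketch of that saving step is shown below, assuming a trained `tf.LayersModel` and a `wordIndex` object; the `saveArtifacts` function and the `pdf-tokenizer` storage key are illustrative assumptions, not part of this commit.

    // Sketch (assumed training side): persist the trained model and its word index
    // so preguntar-pdf.html can reload both later.
    async function saveArtifacts(model, wordIndex) {
        // Saves topology + weights under the key read by tf.loadLayersModel() above.
        await model.save('localstorage://pdf-trained-model');
        // Persist the vocabulary so the question page can tokenize consistently.
        localStorage.setItem('pdf-tokenizer', JSON.stringify(wordIndex));
    }

With that in place, the placeholder tokenizer in preguntar-pdf.html could be replaced by `JSON.parse(localStorage.getItem('pdf-tokenizer') || '{}')`.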