nbroad (HF staff) committed
Commit 5ca5e6d (1 parent: 448bd5e)

sql db, filter logs

Files changed (2)
  1. app.py +118 -30
  2. static/index.html +30 -13
app.py CHANGED
@@ -2,17 +2,15 @@ import os
 import json
 import random
 import string
-from datetime import datetime
-from typing import List, Dict
-
-import requests
-from fastapi import FastAPI, HTTPException
+import sqlite3
+from datetime import datetime, timezone
+from fastapi import Query
+from typing import List, Dict, Any
+from fastapi import FastAPI
 from fastapi.staticfiles import StaticFiles
 from fastapi.responses import FileResponse
 from pydantic import BaseModel
-import plotly.graph_objs as go
 from apscheduler.schedulers.asyncio import AsyncIOScheduler
-
 from huggingface_hub import AsyncInferenceClient
 
 app = FastAPI()
@@ -30,9 +28,9 @@ models = [
     "mistralai/Mistral-7B-Instruct-v0.3",
     "mistralai/Mixtral-8x7B-Instruct-v0.1",
 ]
-LOG_FILE = "/data/api_logs.json"
-
 
+LOG_FILE = "/data/api_logs.json"
+DB_FILE = "/data/api_logs.db"
 client = AsyncInferenceClient(token=os.environ["HF_INFERENCE_API_TOKEN"])
 
 # Ensure log file exists
@@ -40,44 +38,84 @@ if not os.path.exists(LOG_FILE):
     with open(LOG_FILE, "w") as f:
         json.dump([], f)
 
+# Initialize SQLite database
+def init_db():
+    conn = sqlite3.connect(DB_FILE)
+    cursor = conn.cursor()
+    cursor.execute('''
+        CREATE TABLE IF NOT EXISTS api_logs (
+            id INTEGER PRIMARY KEY AUTOINCREMENT,
+            model TEXT,
+            success BOOLEAN,
+            timestamp TEXT,
+            failure_message TEXT,
+            response_data TEXT
+        )
+    ''')
+    conn.commit()
+    conn.close()
+
+init_db()
+
 class LogEntry(BaseModel):
     model: str
     success: bool
     timestamp: str
     failure_message: str
-
-
+    response_data: Dict[str, Any] = None
 
 def random_string(length=10):
     characters = string.ascii_letters + string.digits
     return ''.join(random.choice(characters) for _ in range(length))
 
+def log_to_sqlite(entry: LogEntry):
+    conn = sqlite3.connect(DB_FILE)
+    cursor = conn.cursor()
+    cursor.execute('''
+        INSERT INTO api_logs (model, success, timestamp, failure_message, response_data)
+        VALUES (?, ?, ?, ?, ?)
+    ''', (
+        entry.model,
+        entry.success,
+        entry.timestamp,
+        entry.failure_message,
+        json.dumps(entry.response_data) if entry.response_data else None
+    ))
+    conn.commit()
+    conn.close()
+
 async def check_apis():
     results = []
     for model in models:
         try:
             response = await client.chat_completion(
-                messages=[{"role": "user", "content": f"{random_string()}\nWhat is the capital of France?"}],
-                max_tokens=10,
+                messages=[{"role": "user", "content": f"{random_string()}\nWhat is the capital of France?"}],
+                max_tokens=10,
            )
            success = True
+            response_data = response
            e = 'success'
        except Exception as e:
            print(e)
            success = False
-
-        results.append(LogEntry(
+            response_data = None
+
+        log_entry = LogEntry(
            model=model,
            success=success,
-            timestamp=datetime.now().isoformat(),
-            failure_message=str(e)
-        ))
-
+            timestamp=datetime.now(timezone.utc).isoformat(),
+            failure_message=str(e) if not success else "",
+            response_data=dict(response_data)
+        )
+        results.append(log_entry)
+        log_to_sqlite(log_entry)
+
     with open(LOG_FILE, "r+") as f:
        logs = json.load(f)
        logs.extend([result.dict() for result in results])
        f.seek(0)
-        json.dump(logs, f)
+        f.truncate()
+        json.dump(logs, f, indent=2)
 
 @app.on_event("startup")
 async def start_scheduler():
@@ -85,32 +123,82 @@ async def start_scheduler():
     scheduler.add_job(check_apis, 'interval', minutes=10)
     scheduler.start()
 
+@app.get("/api/models")
+async def get_models():
+    return models
+
 @app.get("/")
 async def index():
     return FileResponse("static/index.html")
 
 @app.get("/api/logs", response_model=List[LogEntry])
-async def get_logs():
-    with open(LOG_FILE, "r") as f:
-        logs = json.load(f)
-    return logs
+async def get_logs(
+    model: str = Query(None, description="Filter by model name"),
+    start: str = Query(None, description="Start time for filtering (ISO format)"),
+    end: str = Query(None, description="End time for filtering (ISO format)")
+):
+    conn = sqlite3.connect(DB_FILE)
+    cursor = conn.cursor()
+
+    query = "SELECT * FROM api_logs"
+    params = []
+
+    if any([model, start, end]):
+        query += " WHERE"
+
+    if model:
+        query += " AND model = ?"
+        params.append(model)
+
+    if start:
+        query += " AND timestamp >= ?"
+        params.append(start)
+
+    if end:
+        query += " AND timestamp <= ?"
+        params.append(end)
+
+    query += " ORDER BY timestamp DESC LIMIT 100"
+
+    cursor.execute(query, params)
+    logs = cursor.fetchall()
+    conn.close()
+
+    return [LogEntry(
+        model=log[1],
+        success=log[2],
+        timestamp=log[3],
+        failure_message=log[4],
+        response_data=json.loads(log[5]) if log[5] else None
+    ) for log in logs]
+
+@app.get("/api/db-logs")
+async def get_db_logs():
+    conn = sqlite3.connect(DB_FILE)
+    cursor = conn.cursor()
+    cursor.execute("SELECT * FROM api_logs ORDER BY timestamp DESC LIMIT 100")
+    logs = cursor.fetchall()
+    conn.close()
+    return [{"id": log[0], "model": log[1], "success": log[2], "timestamp": log[3], "failure_message": log[4], "response_data": json.loads(log[5]) if log[5] else None} for log in logs]
 
 @app.get("/api/chart-data", response_model=Dict[str, Dict[str, Dict[str, List]]])
 async def get_chart_data():
-    with open(LOG_FILE, "r") as f:
-        logs = json.load(f)
+    conn = sqlite3.connect(DB_FILE)
+    cursor = conn.cursor()
+    cursor.execute("SELECT model, success, timestamp FROM api_logs ORDER BY timestamp")
+    logs = cursor.fetchall()
+    conn.close()
 
     chart_data = {}
     for log in logs:
-        model = log['model']
+        model, success, timestamp = log
        if model not in chart_data:
            chart_data[model] = {
                'success': {'x': [], 'y': []},
                'failure': {'x': [], 'y': []}
            }
-
-        status = 'success' if log['success'] else 'failure'
-        chart_data[model][status]['x'].append(log['timestamp'])
+        status = 'success' if success else 'failure'
+        chart_data[model][status]['x'].append(timestamp)
        chart_data[model][status]['y'].append(1)
 
     return chart_data
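A caveat worth flagging in the filtered /api/logs handler above: the builder appends " WHERE" and then immediately " AND model = ?", so any non-empty filter produces invalid SQL such as SELECT * FROM api_logs WHERE AND model = ?, and sqlite3 raises an OperationalError. The failure branch of check_apis also evaluates str(e) after the except block has cleared e and then calls dict(None), both of which raise. A minimal sketch of a collect-then-join approach against the same api_logs table follows; fetch_logs is a hypothetical helper for illustration, not part of this commit.

    import json
    import sqlite3

    DB_FILE = "/data/api_logs.db"  # same path the commit uses

    def fetch_logs(model=None, start=None, end=None, limit=100):
        # Collect filter fragments first, then join them, so the WHERE clause
        # is only emitted when at least one condition exists.
        conditions, params = [], []
        if model:
            conditions.append("model = ?")
            params.append(model)
        if start:
            conditions.append("timestamp >= ?")
            params.append(start)
        if end:
            conditions.append("timestamp <= ?")
            params.append(end)

        query = "SELECT id, model, success, timestamp, failure_message, response_data FROM api_logs"
        if conditions:
            query += " WHERE " + " AND ".join(conditions)
        query += " ORDER BY timestamp DESC LIMIT ?"
        params.append(limit)

        conn = sqlite3.connect(DB_FILE)
        try:
            rows = conn.execute(query, params).fetchall()
        finally:
            conn.close()

        return [
            {
                "model": row[1],
                "success": bool(row[2]),
                "timestamp": row[3],
                "failure_message": row[4],
                "response_data": json.loads(row[5]) if row[5] else None,
            }
            for row in rows
        ]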
static/index.html CHANGED
@@ -15,6 +15,8 @@
         table { width: 100%; border-collapse: collapse; }
         th, td { border: 1px solid #ddd; padding: 8px; text-align: left; }
         th { background-color: #f2f2f2; }
+        #logFilter { margin-top: 20px; margin-bottom: 20px; }
+        #logFilter input, #logFilter select, #logFilter button { margin-right: 10px; }
     </style>
 </head>
 <body>
@@ -22,16 +24,14 @@
     <div id="charts"></div>
     <div id="logs">
         <h2>Logs</h2>
-        <table id="logTable">
-            <thead>
-                <tr>
-                    <th>Timestamp</th>
-                    <th>Model</th>
-                    <th>Status</th>
-                </tr>
-            </thead>
-            <tbody></tbody>
-        </table>
+        <div id="logFilter">
+            <select id="modelSelect">
+                <option value="">All Models</option>
+            </select>
+            <input type="datetime-local" id="startTime">
+            <input type="datetime-local" id="endTime">
+            <button onclick="filterLogs()">Filter Logs</button>
+        </div>
     </div>
 
     <script>
@@ -66,8 +66,8 @@
             });
         }
 
-        function updateLogs() {
-            $.getJSON('/api/logs', function(data) {
+        function updateLogs(filterModel = '', startTime = '', endTime = '') {
+            $.getJSON('/api/logs', { model: filterModel, start: startTime, end: endTime }, function(data) {
                 let tbody = $('#logTable tbody');
                 tbody.empty();
                 data.forEach(function(log) {
@@ -82,11 +82,28 @@
             });
         }
 
+        function updateModelSelect() {
+            $.getJSON('/api/models', function(models) {
+                let select = $('#modelSelect');
+                models.forEach(function(model) {
+                    $('<option>').val(model).text(model).appendTo(select);
+                });
+            });
+        }
+
+        function filterLogs() {
+            let model = $('#modelSelect').val();
+            let startTime = $('#startTime').val();
+            let endTime = $('#endTime').val();
+            updateLogs(model, startTime, endTime);
+        }
+
         $(document).ready(function() {
             updateCharts();
             updateLogs();
+            updateModelSelect();
             setInterval(updateCharts, 60000); // Update every minute
-            setInterval(updateLogs, 60000); // Update every minute
+            setInterval(function() { updateLogs($('#modelSelect').val(), $('#startTime').val(), $('#endTime').val()); }, 60000);
         });
     </script>
 </body>
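Once the Space is running, the new endpoints can also be exercised outside the UI. A small client sketch, assuming the app is reachable at http://localhost:7860 (the address and port are assumptions, not stated in the commit) and that the timestamp bounds use the same ISO-8601 UTC format the logger writes; note that the filtered call will surface the WHERE-clause issue flagged above until it is fixed, while the unfiltered call is unaffected.

    import requests

    BASE_URL = "http://localhost:7860"  # assumed local address of the FastAPI app

    # Models the scheduler probes, as served by the new /api/models endpoint.
    models = requests.get(f"{BASE_URL}/api/models", timeout=10).json()
    print(models)

    # Most recent logs, unfiltered.
    logs = requests.get(f"{BASE_URL}/api/logs", timeout=10).json()

    # Logs for one model inside a UTC window; timestamps are stored and compared
    # as ISO-8601 strings, so the bounds must use the same format.
    params = {
        "model": "mistralai/Mistral-7B-Instruct-v0.3",
        "start": "2024-06-01T00:00:00+00:00",
        "end": "2024-06-02T00:00:00+00:00",
    }
    filtered = requests.get(f"{BASE_URL}/api/logs", params=params, timeout=10).json()
    for entry in filtered:
        print(entry["timestamp"], entry["model"], entry["success"], entry["failure_message"])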