完善查询日志分组

This commit is contained in:
2025-08-02 22:33:23 +08:00
parent eb48cf17e6
commit 9cfc363227
2 changed files with 256 additions and 83 deletions

90
app.py
View File

@@ -21,13 +21,33 @@ class QueryLogCollector:
def __init__(self, max_logs=1000):
    """Initialize the in-memory query log collector.

    Args:
        max_logs: Maximum number of log entries retained in memory.
    """
    # Chronological list of log-entry dicts (timestamp/level/message/batch_id).
    self.logs = []
    # Retention cap for self.logs.
    self.max_logs = max_logs
    # Identifier of the batch currently in progress, or None when idle.
    self.current_batch_id = None
    # Monotonic counter used to build unique batch identifiers.
    self.batch_counter = 0
# NOTE(review): a stale pre-diff header line `def add_log(self, level, message):`
# preceded this method (diff residue with no body — a syntax error); removed.
def start_new_batch(self, query_type='single'):
    """Start a new query batch and emit a batch-start marker log.

    Args:
        query_type: Human-readable label for the batch (e.g. '单表', '分表').

    Returns:
        The newly generated batch id string.
    """
    self.batch_counter += 1
    # Combine the counter with a time suffix so ids stay unique and readable.
    self.current_batch_id = f"batch_{self.batch_counter}_{datetime.now().strftime('%H%M%S')}"
    # Batch-start marker; force_batch_id pins the marker to this batch.
    self.add_log('INFO', f"=== 开始{query_type}查询批次 (ID: {self.current_batch_id}) ===", force_batch_id=self.current_batch_id)
    return self.current_batch_id
def end_current_batch(self):
    """Close the active query batch, emitting a completion marker log."""
    if not self.current_batch_id:
        # No batch in progress — nothing to close.
        return
    self.add_log('INFO', f"=== 查询批次完成 (ID: {self.current_batch_id}) ===", force_batch_id=self.current_batch_id)
    self.current_batch_id = None
def add_log(self, level, message, force_batch_id=None):
    """Append one log entry, tagging it with the current (or forced) batch id.

    Args:
        level: Log level string ('INFO', 'WARNING', 'ERROR', ...).
        message: Log message text.
        force_batch_id: When given, overrides the collector's current batch id
            (used for batch start/end markers).
    """
    # Millisecond-precision timestamp (strip microseconds down to ms).
    timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
    batch_id = force_batch_id or self.current_batch_id
    # NOTE(review): the diff residue contained both the old and new 'message'
    # lines (a duplicated dict entry / syntax error); only the new form is kept.
    log_entry = {
        'timestamp': timestamp,
        'level': level,
        'message': message,
        'batch_id': batch_id
    }
    self.logs.append(log_entry)
    # Keep the number of stored logs within max_logs (oldest entries dropped).
    if len(self.logs) > self.max_logs:
        self.logs = self.logs[-self.max_logs:]
@@ -39,8 +59,26 @@ class QueryLogCollector:
return self.logs[-limit:]
return self.logs
def get_logs_grouped_by_batch(self, limit=None):
    """Return logs grouped by batch id, batches in first-seen order.

    Args:
        limit: Optional cap passed through to get_logs().

    Returns:
        List of (batch_id, [log_entry, ...]) tuples, ordered by the first
        appearance of each batch in the log stream.
    """
    # Plain dicts preserve insertion order (Python 3.7+), so setdefault()
    # both creates buckets and records first-seen batch order in one pass.
    buckets = {}
    for entry in self.get_logs(limit):
        buckets.setdefault(entry.get('batch_id', 'unknown'), []).append(entry)
    return list(buckets.items())
def clear_logs(self):
    """Reset the collector: drop all logs and restart batch numbering."""
    self.batch_counter = 0
    self.current_batch_id = None
    del self.logs[:]
# Global query-log collector instance shared by all request handlers.
query_log_collector = QueryLogCollector()
@@ -1251,6 +1289,10 @@ def sharding_query_compare():
"""分表查询比对API"""
try:
data = request.json
# 开始新的查询批次
batch_id = query_log_collector.start_new_batch('分表')
logger.info("开始执行分表数据库比对查询")
# 解析配置
@@ -1352,10 +1394,14 @@ def sharding_query_compare():
logger.info(f"分表比对完成:发现 {len(differences)} 处差异")
# 结束查询批次
query_log_collector.end_current_batch()
return jsonify(result)
except Exception as e:
logger.error(f"分表查询执行失败:{str(e)}")
# 结束查询批次(出错情况)
query_log_collector.end_current_batch()
return jsonify({'error': f'分表查询执行失败:{str(e)}'}), 500
finally:
# 关闭连接
@@ -1366,12 +1412,18 @@ def sharding_query_compare():
except Exception as e:
logger.error(f"分表查询请求处理失败:{str(e)}")
# 结束查询批次(请求处理出错)
query_log_collector.end_current_batch()
return jsonify({'error': f'分表查询请求处理失败:{str(e)}'}), 500
@app.route('/api/query', methods=['POST'])
def query_compare():
try:
data = request.json
# 开始新的查询批次
batch_id = query_log_collector.start_new_batch('单表')
logger.info("开始执行数据库比对查询")
# 解析配置
@@ -1482,10 +1534,14 @@ def query_compare():
except Exception as e:
logger.warning(f"保存查询历史记录失败: {e}")
# 结束查询批次
query_log_collector.end_current_batch()
return jsonify(result)
except Exception as e:
logger.error(f"查询执行失败:{str(e)}")
# 结束查询批次(出错情况)
query_log_collector.end_current_batch()
return jsonify({'error': f'查询执行失败:{str(e)}'}), 500
finally:
# 关闭连接
@@ -1496,6 +1552,8 @@ def query_compare():
except Exception as e:
logger.error(f"请求处理失败:{str(e)}")
# 结束查询批次(请求处理出错)
query_log_collector.end_current_batch()
return jsonify({'error': f'请求处理失败:{str(e)}'}), 500
@app.route('/api/default-config')
@@ -1645,15 +1703,29 @@ def api_delete_query_history(history_id):
@app.route('/api/query-logs', methods=['GET'])
def api_get_query_logs():
    """Return query logs as JSON, grouped by batch unless grouped=false.

    Query params:
        limit: optional int cap on the number of returned log entries.
        grouped: 'true' (default) for per-batch grouping, 'false' for a flat list.
    """
    # NOTE(review): the diff residue retained the pre-change docstring and an
    # early flat-list `return` that made the grouped branch unreachable; the
    # stale lines are removed and only the new grouped/flat logic is kept.
    try:
        limit = request.args.get('limit', type=int)
        grouped = request.args.get('grouped', 'true').lower() == 'true'  # grouped display by default

        if grouped:
            # Batches as [(batch_id, [log, ...]), ...] in chronological order.
            grouped_logs = query_log_collector.get_logs_grouped_by_batch(limit)
            return jsonify({
                'success': True,
                'data': grouped_logs,
                'total': len(query_log_collector.logs),
                'grouped': True
            })
        else:
            # Backward-compatible flat log list.
            logs = query_log_collector.get_logs(limit)
            return jsonify({
                'success': True,
                'data': logs,
                'total': len(query_log_collector.logs),
                'grouped': False
            })
    except Exception as e:
        logger.error(f"获取查询日志失败: {e}")
        return jsonify({'success': False, 'error': str(e)}), 500

View File

@@ -3132,12 +3132,17 @@ let allQueryLogs = []; // 存储所有日志
// Fetch query logs from the backend and render them (grouped by batch).
// NOTE(review): the diff residue declared `const response` twice (old and new
// fetch URL — a SyntaxError) and kept the old flat-display lines alongside the
// new grouped branch; only the new version is kept. The catch body was cut by
// the diff hunk and is reconstructed minimally — confirm against the full file.
async function refreshQueryLogs() {
    try {
        // grouped=true: server returns [(batchId, [log, ...]), ...]
        const response = await fetch('/api/query-logs?grouped=true');
        const result = await response.json();

        if (result.success && result.data) {
            if (result.grouped) {
                displayGroupedQueryLogs(result.data);
            } else {
                // Fallback for older servers: flat list display
                allQueryLogs = result.data;
                filterLogsByLevel();
            }
        } else {
            document.getElementById('query-logs').innerHTML = '<div class="alert alert-warning">无法获取查询日志</div>';
        }
    } catch (error) {
        console.error(error);
    }
}
// Render query logs as one collapsible Bootstrap card per query batch.
// NOTE(review): the diff residue interleaved the removed old filterLogsByLevel
// body into this function (nested, broken syntax); the stale lines are removed.
// Doubled regex backslashes (\\w, \\d — an extraction artifact) are restored to
// single backslashes, matching the patterns in the removed displayQueryLogs.
function displayGroupedQueryLogs(groupedLogs) {
    const container = document.getElementById('query-logs');

    if (!groupedLogs || groupedLogs.length === 0) {
        container.innerHTML = '<div class="alert alert-info">暂无查询日志</div>';
        return;
    }

    // Level-filter checkbox state, read once (not per log entry).
    const showInfo = document.getElementById('log-level-info').checked;
    const showWarning = document.getElementById('log-level-warning').checked;
    const showError = document.getElementById('log-level-error').checked;

    let html = '';

    // One collapsible panel per batch
    groupedLogs.forEach((batchData, index) => {
        const [batchId, logs] = batchData;
        const isExpanded = index === groupedLogs.length - 1; // expand the newest batch by default
        const collapseId = `batch-${batchId}`;

        // Per-level counts for the header badges
        const logCounts = {
            INFO: logs.filter(log => log.level === 'INFO').length,
            WARNING: logs.filter(log => log.level === 'WARNING').length,
            ERROR: logs.filter(log => log.level === 'ERROR').length
        };
        const totalLogs = logs.length;
        const firstLog = logs[0];
        const lastLog = logs[logs.length - 1];
        const duration = firstLog && lastLog ? calculateDuration(firstLog.timestamp, lastLog.timestamp) : '0秒';

        // Batch status drives the card color and icon
        const hasErrors = logCounts.ERROR > 0;
        const hasWarnings = logCounts.WARNING > 0;
        const batchStatus = hasErrors ? 'danger' : hasWarnings ? 'warning' : 'success';
        const batchIcon = hasErrors ? 'fas fa-times-circle' : hasWarnings ? 'fas fa-exclamation-triangle' : 'fas fa-check-circle';

        // Extract the query type from the batch-start marker message
        const batchTypeMatch = firstLog?.message.match(/开始(\w+)查询批次/);
        const batchType = batchTypeMatch ? batchTypeMatch[1] : '未知';

        html += `
            <div class="card mb-3 border-${batchStatus}">
                <div class="card-header bg-${batchStatus} bg-opacity-10" data-bs-toggle="collapse" data-bs-target="#${collapseId}" style="cursor: pointer;">
                    <div class="d-flex justify-content-between align-items-center">
                        <div class="d-flex align-items-center">
                            <i class="${batchIcon} text-${batchStatus} me-2"></i>
                            <strong>${batchType}查询批次 ${batchId}</strong>
                            <span class="badge bg-primary ms-2">${totalLogs}条日志</span>
                        </div>
                        <div class="d-flex align-items-center">
                            <small class="text-muted me-3">
                                <i class="fas fa-clock"></i> ${duration}
                            </small>
                            <div>
                                ${logCounts.INFO > 0 ? `<span class="badge bg-info me-1">${logCounts.INFO} INFO</span>` : ''}
                                ${logCounts.WARNING > 0 ? `<span class="badge bg-warning me-1">${logCounts.WARNING} WARN</span>` : ''}
                                ${logCounts.ERROR > 0 ? `<span class="badge bg-danger me-1">${logCounts.ERROR} ERROR</span>` : ''}
                            </div>
                            <i class="fas fa-chevron-down ms-2"></i>
                        </div>
                    </div>
                </div>
                <div class="collapse ${isExpanded ? 'show' : ''}" id="${collapseId}">
                    <div class="card-body p-0">
                        <div class="log-entries" style="max-height: 400px; overflow-y: auto;">
        `;

        // Render this batch's log entries, applying the level filter
        logs.forEach(log => {
            let shouldShow = false;
            switch (log.level) {
                case 'INFO': shouldShow = showInfo; break;
                case 'WARNING': shouldShow = showWarning; break;
                case 'ERROR': shouldShow = showError; break;
                default: shouldShow = true;
            }
            if (!shouldShow) return;

            const levelClass = {
                'INFO': 'text-primary',
                'WARNING': 'text-warning',
                'ERROR': 'text-danger',
                'DEBUG': 'text-secondary'
            }[log.level] || 'text-dark';

            const levelIcon = {
                'INFO': 'fas fa-info-circle',
                'WARNING': 'fas fa-exclamation-triangle',
                'ERROR': 'fas fa-times-circle',
                'DEBUG': 'fas fa-bug'
            }[log.level] || 'fas fa-circle';

            // SQL highlighting: render the statement in a <code> block
            let message = escapeHtml(log.message);
            if (message.includes('执行查询SQL:')) {
                message = message.replace(/执行查询SQL: (SELECT.*?);/g,
                    '执行查询SQL: <br><code class="bg-light d-block p-2 text-dark" style="font-size: 0.9em;">$1;</code>');
            }
            // Emphasize timings and record counts
            message = message.replace(/(\d+\.\d{3}秒)/g, '<strong class="text-success">$1</strong>');
            message = message.replace(/(返回记录数=\d+)/g, '<strong class="text-info">$1</strong>');
            message = message.replace(/(执行时间=[\d.]+秒)/g, '<strong class="text-success">$1</strong>');

            html += `
                <div class="border-bottom py-2 px-3 log-entry" data-level="${log.level}">
                    <div class="d-flex justify-content-between align-items-start">
                        <div class="flex-grow-1">
                            <span class="${levelClass}">
                                <i class="${levelIcon}"></i>
                                <strong>[${log.level}]</strong>
                            </span>
                            <div class="ms-4 mt-1" style="font-size: 0.9em;">${message}</div>
                        </div>
                        <small class="text-muted ms-2 flex-shrink-0" style="min-width: 140px;">${log.timestamp}</small>
                    </div>
                </div>
            `;
        });

        html += `
                        </div>
                    </div>
                </div>
            </div>
        `;
    });

    container.innerHTML = html;

    // Scroll the newest batch into view
    if (groupedLogs.length > 0) {
        const latestBatch = container.querySelector('.card:last-child');
        if (latestBatch) {
            latestBatch.scrollIntoView({ behavior: 'smooth', block: 'end' });
        }
    }
}
// Human-readable duration between two timestamp strings.
// NOTE(review): the unit suffixes appear to have been lost in extraction —
// the '0秒' default and the removed function's patterns imply '秒'/'分…秒';
// restored here (previously returned bare numbers, e.g. "1.5" and "15").
function calculateDuration(startTime, endTime) {
    try {
        const start = new Date(startTime);
        const end = new Date(endTime);
        const diffMs = end - start;

        if (diffMs < 1000) {
            return `${diffMs}ms`;
        } else if (diffMs < 60000) {
            return `${(diffMs / 1000).toFixed(1)}秒`;
        } else {
            const minutes = Math.floor(diffMs / 60000);
            const seconds = Math.floor((diffMs % 60000) / 1000);
            return `${minutes}分${seconds}秒`;
        }
    } catch (e) {
        return '未知';
    }
}
// Level-filter change handler: re-fetch and re-render the grouped log view
// so the current checkbox state is applied during rendering.
function filterLogsByLevel() {
    refreshQueryLogs();
}
async function clearQueryLogs() {
@@ -3187,65 +3347,6 @@ async function clearQueryLogs() {
}
}
// Render a flat (ungrouped) list of log entries into #query-logs.
// Legacy renderer kept for the non-grouped API response shape.
function displayQueryLogs(logs) {
    const container = document.getElementById('query-logs');

    if (!logs || logs.length === 0) {
        container.innerHTML = '<div class="alert alert-info">暂无查询日志</div>';
        return;
    }

    const logHtml = logs.map(log => {
        const levelClass = {
            'INFO': 'text-primary',
            'WARNING': 'text-warning',
            'ERROR': 'text-danger',
            'DEBUG': 'text-secondary'
        }[log.level] || 'text-dark';

        const levelIcon = {
            'INFO': 'fas fa-info-circle',
            'WARNING': 'fas fa-exclamation-triangle',
            'ERROR': 'fas fa-times-circle',
            'DEBUG': 'fas fa-bug'
        }[log.level] || 'fas fa-circle';

        // SQL highlighting: render the statement in a <code> block
        let message = escapeHtml(log.message);
        if (message.includes('执行查询SQL:')) {
            message = message.replace(/执行查询SQL: (SELECT.*?);/g,
                '执行查询SQL: <br><code class="bg-light d-block p-2 text-dark" style="font-size: 0.9em;">$1;</code>');
        }
        // Emphasize timings and record counts
        message = message.replace(/(\d+\.\d{3}秒)/g, '<strong class="text-success">$1</strong>');
        message = message.replace(/(返回记录数=\d+)/g, '<strong class="text-info">$1</strong>');
        message = message.replace(/(执行时间=[\d.]+秒)/g, '<strong class="text-success">$1</strong>');

        return `
            <div class="border-bottom py-2 log-entry" data-level="${log.level}">
                <div class="d-flex justify-content-between align-items-start">
                    <div class="flex-grow-1">
                        <span class="${levelClass}">
                            <i class="${levelIcon}"></i>
                            <strong>[${log.level}]</strong>
                        </span>
                        <div class="ms-4 mt-1">${message}</div>
                    </div>
                    <small class="text-muted ms-2 flex-shrink-0" style="min-width: 140px;">${log.timestamp}</small>
                </div>
            </div>
        `;
    }).join('');

    container.innerHTML = logHtml;

    // Auto-scroll to the bottom (latest entries)
    container.scrollTop = container.scrollHeight;
}
// 在查询执行后自动刷新日志
function autoRefreshLogsAfterQuery() {
// 延迟一下确保后端日志已经记录