Improve query log grouping

2025-08-02 22:33:23 +08:00
parent eb48cf17e6
commit 9cfc363227
2 changed files with 256 additions and 83 deletions

app.py (90 changed lines)

@@ -21,13 +21,33 @@ class QueryLogCollector:
def __init__(self, max_logs=1000):
self.logs = []
self.max_logs = max_logs
self.current_batch_id = None
self.batch_counter = 0
def add_log(self, level, message):
def start_new_batch(self, query_type='single'):
"""开始新的查询批次"""
self.batch_counter += 1
self.current_batch_id = f"batch_{self.batch_counter}_{datetime.now().strftime('%H%M%S')}"
# Add a batch-start marker
self.add_log('INFO', f"=== Starting {query_type} query batch (ID: {self.current_batch_id}) ===", force_batch_id=self.current_batch_id)
return self.current_batch_id
def end_current_batch(self):
"""结束当前查询批次"""
if self.current_batch_id:
self.add_log('INFO', f"=== 查询批次完成 (ID: {self.current_batch_id}) ===", force_batch_id=self.current_batch_id)
self.current_batch_id = None
def add_log(self, level, message, force_batch_id=None):
timestamp = datetime.now().strftime('%Y-%m-%d %H:%M:%S.%f')[:-3]
batch_id = force_batch_id or self.current_batch_id
log_entry = {
'timestamp': timestamp,
'level': level,
'message': message
'message': message,
'batch_id': batch_id
}
self.logs.append(log_entry)
# Keep the number of stored logs within the limit
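With this change, every entry recorded while a batch is active carries that batch's id. A minimal sketch of the resulting entry shape, assuming the QueryLogCollector shown above is used on its own (values are illustrative, not from the commit):

collector = QueryLogCollector()
batch_id = collector.start_new_batch('single-table')   # e.g. "batch_1_223323"
# Recorded while the batch is active, so it inherits current_batch_id.
collector.add_log('INFO', 'connecting to source database')
collector.end_current_batch()
print(collector.logs[1])
# {'timestamp': '2025-08-02 22:33:23.456', 'level': 'INFO',
#  'message': 'connecting to source database', 'batch_id': 'batch_1_223323'}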
@@ -39,8 +59,26 @@ class QueryLogCollector:
return self.logs[-limit:]
return self.logs
def get_logs_grouped_by_batch(self, limit=None):
"""按批次分组获取日志"""
logs = self.get_logs(limit)
grouped_logs = {}
batch_order = []
for log in logs:
batch_id = log.get('batch_id', 'unknown')
if batch_id not in grouped_logs:
grouped_logs[batch_id] = []
batch_order.append(batch_id)
grouped_logs[batch_id].append(log)
# Return batches in chronological order
return [(batch_id, grouped_logs[batch_id]) for batch_id in batch_order]
def clear_logs(self):
self.logs.clear()
self.current_batch_id = None
self.batch_counter = 0
# Global log collector instance
query_log_collector = QueryLogCollector()
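get_logs_grouped_by_batch keeps the first-seen order of batch ids, so batches come back chronologically as (batch_id, entries) pairs. A small illustrative sketch, not part of the diff:

# Iterate the grouped view of the most recent 200 entries.
for batch_id, entries in query_log_collector.get_logs_grouped_by_batch(limit=200):
    print(f"{batch_id}: {len(entries)} entries")
    for entry in entries:
        print(f"  [{entry['level']}] {entry['timestamp']} {entry['message']}")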
@@ -1251,6 +1289,10 @@ def sharding_query_compare():
"""分表查询比对API"""
try:
data = request.json
# Start a new query batch
batch_id = query_log_collector.start_new_batch('sharded')
logger.info("Starting sharded-table database comparison query")
# Parse configuration
@@ -1352,10 +1394,14 @@ def sharding_query_compare():
logger.info(f"分表比对完成:发现 {len(differences)} 处差异")
# 结束查询批次
query_log_collector.end_current_batch()
return jsonify(result)
except Exception as e:
logger.error(f"分表查询执行失败:{str(e)}")
# 结束查询批次(出错情况)
query_log_collector.end_current_batch()
return jsonify({'error': f'Sharded-table query execution failed: {str(e)}'}), 500
finally:
# Close connections
@@ -1366,12 +1412,18 @@ def sharding_query_compare():
except Exception as e:
logger.error(f"分表查询请求处理失败:{str(e)}")
# 结束查询批次(请求处理出错)
query_log_collector.end_current_batch()
return jsonify({'error': f'Sharded-table query request handling failed: {str(e)}'}), 500
@app.route('/api/query', methods=['POST'])
def query_compare():
try:
data = request.json
# Start a new query batch
batch_id = query_log_collector.start_new_batch('single-table')
logger.info("Starting database comparison query")
# Parse configuration
@@ -1482,10 +1534,14 @@ def query_compare():
except Exception as e:
logger.warning(f"保存查询历史记录失败: {e}")
# 结束查询批次
query_log_collector.end_current_batch()
return jsonify(result)
except Exception as e:
logger.error(f"查询执行失败:{str(e)}")
# 结束查询批次(出错情况)
query_log_collector.end_current_batch()
return jsonify({'error': f'Query execution failed: {str(e)}'}), 500
finally:
# Close connections
@@ -1496,6 +1552,8 @@ def query_compare():
except Exception as e:
logger.error(f"请求处理失败:{str(e)}")
# 结束查询批次(请求处理出错)
query_log_collector.end_current_batch()
return jsonify({'error': f'Request handling failed: {str(e)}'}), 500
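Both comparison routes follow the same lifecycle: start_new_batch at the top of the handler, end_current_batch on the success path and in every except branch. One way to centralize that guarantee would be a small context manager; this is only a sketch of the idea, not code from the commit:

from contextlib import contextmanager

@contextmanager
def query_batch(query_type):
    # Open a log batch and close it even if the handler raises.
    query_log_collector.start_new_batch(query_type)
    try:
        yield
    finally:
        query_log_collector.end_current_batch()

# Hypothetical usage inside a route handler:
# with query_batch('single-table'):
#     ...run the comparison and build the JSON response...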
@app.route('/api/default-config')
@@ -1645,15 +1703,29 @@ def api_delete_query_history(history_id):
@app.route('/api/query-logs', methods=['GET'])
def api_get_query_logs():
"""获取查询日志"""
"""获取查询日志,支持分组显示"""
try:
limit = request.args.get('limit', type=int)
logs = query_log_collector.get_logs(limit)
return jsonify({
'success': True,
'data': logs,
'total': len(query_log_collector.logs)
})
grouped = request.args.get('grouped', 'true').lower() == 'true'  # Grouped display by default
if grouped:
# Return grouped logs
grouped_logs = query_log_collector.get_logs_grouped_by_batch(limit)
return jsonify({
'success': True,
'data': grouped_logs,
'total': len(query_log_collector.logs),
'grouped': True
})
else:
# Return the flat log list
logs = query_log_collector.get_logs(limit)
return jsonify({
'success': True,
'data': logs,
'total': len(query_log_collector.logs),
'grouped': False
})
except Exception as e:
logger.error(f"获取查询日志失败: {e}")
return jsonify({'success': False, 'error': str(e)}), 500
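With grouping enabled (the default), the data field is a list of [batch_id, entries] pairs (jsonify serializes the tuples as JSON arrays). A hedged client-side example, assuming the app is reachable at localhost:5000:

import requests

# grouped=true is the default; pass grouped=false for the flat list.
resp = requests.get('http://localhost:5000/api/query-logs',
                    params={'limit': 100, 'grouped': 'true'})
payload = resp.json()
# Expected shape (grouped): {'success': True, 'grouped': True, 'total': <int>,
#                            'data': [[batch_id, [entry, ...]], ...]}
for batch_id, entries in payload['data']:
    print(batch_id, len(entries))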