自动记录日志

This commit is contained in:
2025-08-04 15:41:59 +08:00
parent 4c4d168471
commit dbf4255aea
3 changed files with 64 additions and 14 deletions

View File

@@ -50,6 +50,10 @@ def setup_routes(app, query_log_collector):
# Page route: serve the Redis data-comparison UI.
# NOTE(review): extraction flattened the original indentation; restored here.
@app.route('/redis-compare')
def redis_compare():
    # Renders the static template; all data is fetched later via the JSON APIs.
    return render_template('redis_compare.html')
# Page route: serve the Redis connectivity-test UI (added in this commit).
# NOTE(review): extraction flattened the original indentation; restored here.
@app.route('/redis-test')
def redis_test():
    # Renders the static template; the page drives its own API calls.
    return render_template('redis_test.html')
# 基础API
@app.route('/api/default-config')

View File

@@ -12,6 +12,23 @@ from .redis_client import RedisPerformanceTracker
logger = logging.getLogger(__name__)
# 导入查询日志收集器
# Obtain the shared query-log collector from the application package.
# When this module is used standalone (tests, scripts) the `app` package may
# be missing, so fall back to an inert stub exposing the same interface —
# every logging call then becomes a harmless no-op.
try:
    from app import query_log_collector
except ImportError:
    class DummyQueryLogCollector:
        """No-op stand-in that mirrors the query-log collector's interface."""

        def start_new_batch(self, query_type):
            # Real collector returns a batch id; the stub has none to give.
            return None

        def end_current_batch(self):
            return None

        def set_history_id(self, history_id):
            return None

        def add_log(self, level, message):
            return None

    query_log_collector = DummyQueryLogCollector()
def get_random_keys_from_redis(redis_client, count=100, pattern="*", performance_tracker=None):
"""
从Redis集群中获取随机keys
@@ -29,6 +46,7 @@ def get_random_keys_from_redis(redis_client, count=100, pattern="*", performance
keys = set()
logger.info(f"开始扫描获取随机keys目标数量: {count},模式: {pattern}")
query_log_collector.add_log('INFO', f"开始扫描获取随机keys目标数量: {count},模式: {pattern}")
try:
# 使用scan_iter获取keys
@@ -52,6 +70,7 @@ def get_random_keys_from_redis(redis_client, count=100, pattern="*", performance
performance_tracker.record_scan_time(scan_duration)
logger.info(f"扫描获取 {len(keys)} 个随机keys耗时 {scan_duration:.3f}")
query_log_collector.add_log('INFO', f"扫描获取 {len(keys)} 个随机keys耗时 {scan_duration:.3f}")
return keys
except RedisError as e:
@@ -62,6 +81,7 @@ def get_random_keys_from_redis(redis_client, count=100, pattern="*", performance
performance_tracker.record_scan_time(scan_duration)
logger.error(f"获取随机keys失败: {e},耗时 {scan_duration:.3f}")
query_log_collector.add_log('ERROR', f"获取随机keys失败: {e},耗时 {scan_duration:.3f}")
return []
def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", performance_tracker=None):
@@ -81,6 +101,7 @@ def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", per
result = [None] * len(keys)
logger.info(f"开始从{cluster_name}批量查询 {len(keys)} 个keys")
query_log_collector.add_log('INFO', f"开始从{cluster_name}批量查询 {len(keys)} 个keys")
try:
# 检查是否是集群模式
@@ -94,6 +115,7 @@ def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", per
slot_groups.setdefault(slot, []).append((idx, key))
logger.info(f"集群模式keys分布在 {len(slot_groups)} 个slot中")
query_log_collector.add_log('INFO', f"集群模式keys分布在 {len(slot_groups)} 个slot中")
# 分组批量查询
for group in slot_groups.values():
@@ -104,6 +126,7 @@ def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", per
else:
# 单节点模式:直接批量查询
logger.info(f"单节点模式:直接批量查询")
query_log_collector.add_log('INFO', f"单节点模式:直接批量查询")
result = redis_client.mget(keys)
end_time = time.time()
@@ -115,6 +138,7 @@ def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", per
# 统计成功获取的key数量
successful_count = sum(1 for v in result if v is not None)
logger.info(f"{cluster_name}查询完成,成功获取 {successful_count}/{len(keys)} 个值,耗时 {query_duration:.3f}")
query_log_collector.add_log('INFO', f"{cluster_name}查询完成,成功获取 {successful_count}/{len(keys)} 个值,耗时 {query_duration:.3f}")
return result
@@ -126,6 +150,7 @@ def get_redis_values_by_keys(redis_client, keys, cluster_name="Redis集群", per
performance_tracker.record_query(f"{cluster_name}_batch_query_error", query_duration)
logger.error(f"{cluster_name}批量查询失败: {e},耗时 {query_duration:.3f}")
query_log_collector.add_log('ERROR', f"{cluster_name}批量查询失败: {e},耗时 {query_duration:.3f}")
return result
def compare_redis_data(client1, client2, keys, cluster1_name="生产集群", cluster2_name="测试集群", performance_tracker=None):
@@ -146,6 +171,7 @@ def compare_redis_data(client1, client2, keys, cluster1_name="生产集群", clu
comparison_start_time = time.time()
logger.info(f"开始比较 {cluster1_name}{cluster2_name} 的数据")
query_log_collector.add_log('INFO', f"开始比较 {cluster1_name}{cluster2_name} 的数据")
# 获取两个集群的数据
values1 = get_redis_values_by_keys(client1, keys, cluster1_name, performance_tracker)
@@ -264,6 +290,8 @@ def compare_redis_data(client1, client2, keys, cluster1_name="生产集群", clu
logger.info(f"数据比对完成,耗时 {comparison_duration:.3f}")
logger.info(f"比对统计: 总计{stats['total_keys']}个key相同{stats['identical_count']}个,不同{stats['different_count']}个,缺失{stats['missing_in_cluster1'] + stats['missing_in_cluster2'] + stats['both_missing']}")
query_log_collector.add_log('INFO', f"数据比对完成,耗时 {comparison_duration:.3f}")
query_log_collector.add_log('INFO', f"比对统计: 总计{stats['total_keys']}个key相同{stats['identical_count']}个,不同{stats['different_count']}个,缺失{stats['missing_in_cluster1'] + stats['missing_in_cluster2'] + stats['both_missing']}")
return result
@@ -288,16 +316,21 @@ def execute_redis_comparison(config1, config2, query_options):
cluster2_name = config2.get('name', '测试集群')
logger.info(f"开始执行Redis数据比较: {cluster1_name} vs {cluster2_name}")
query_log_collector.add_log('INFO', f"开始执行Redis数据比较: {cluster1_name} vs {cluster2_name}")
# 创建连接
client1 = create_redis_client(config1, cluster1_name, performance_tracker)
client2 = create_redis_client(config2, cluster2_name, performance_tracker)
if not client1:
return {'error': f'{cluster1_name}连接失败'}
error_msg = f'{cluster1_name}连接失败'
query_log_collector.add_log('ERROR', error_msg)
return {'error': error_msg}
if not client2:
return {'error': f'{cluster2_name}连接失败'}
error_msg = f'{cluster2_name}连接失败'
query_log_collector.add_log('ERROR', error_msg)
return {'error': error_msg}
try:
# 获取要比较的keys
@@ -314,6 +347,7 @@ def execute_redis_comparison(config1, config2, query_options):
source_name = cluster2_name if source_cluster == 'cluster2' else cluster1_name
logger.info(f"{source_name}随机获取 {count} 个keys")
query_log_collector.add_log('INFO', f"{source_name}随机获取 {count} 个keys")
keys = get_random_keys_from_redis(source_client, count, pattern, performance_tracker)
elif query_mode == 'specified':
@@ -321,11 +355,15 @@ def execute_redis_comparison(config1, config2, query_options):
keys = query_options.get('keys', [])
# 如果keys是字符串需要转换为bytesRedis通常使用bytes
keys = [k.encode('utf-8') if isinstance(k, str) else k for k in keys]
query_log_collector.add_log('INFO', f"使用指定的 {len(keys)} 个keys进行比较")
if not keys:
return {'error': '未获取到任何keys进行比较'}
error_msg = '未获取到任何keys进行比较'
query_log_collector.add_log('ERROR', error_msg)
return {'error': error_msg}
logger.info(f"准备比较 {len(keys)} 个keys")
query_log_collector.add_log('INFO', f"准备比较 {len(keys)} 个keys")
# 执行比较
comparison_result = compare_redis_data(
@@ -342,6 +380,7 @@ def execute_redis_comparison(config1, config2, query_options):
except Exception as e:
logger.error(f"Redis数据比较执行失败: {e}")
query_log_collector.add_log('ERROR', f"Redis数据比较执行失败: {e}")
return {'error': f'执行失败: {str(e)}'}
finally:

View File

@@ -271,7 +271,10 @@ async function executeRedisComparison() {
if (response.ok && result.success !== false) {
currentResults = result;
displayResults(result);
showAlert('Redis数据比较完成', 'success');
showAlert('Redis数据比较完成,历史记录已自动保存', 'success');
// 刷新历史记录列表(后台已自动保存)
loadRedisQueryHistory();
} else {
showAlert(`比较失败: ${result.error}`, 'danger');
}
@@ -1067,7 +1070,7 @@ async function loadSelectedRedisHistory() {
// 如果有原始结果,直接显示
if (history.raw_results) {
const displayResults = {
const resultsData = {
stats: history.results_summary,
identical_results: history.raw_results.identical_results || [],
different_results: history.raw_results.different_results || [],
@@ -1079,8 +1082,8 @@ async function loadSelectedRedisHistory() {
}
};
currentResults = displayResults;
displayResults(displayResults);
currentResults = resultsData;
displayResults(resultsData);
showAlert(`历史记录 "${history.name}" 加载成功`, 'success');
} else {
showAlert(`历史记录 "${history.name}" 配置加载成功,但没有结果数据`, 'info');
@@ -1158,7 +1161,7 @@ async function loadRedisHistoryById(historyId) {
// 如果有原始结果,直接显示
if (history.raw_results) {
const displayResults = {
const resultsData = {
stats: history.results_summary,
identical_results: history.raw_results.identical_results || [],
different_results: history.raw_results.different_results || [],
@@ -1170,8 +1173,8 @@ async function loadRedisHistoryById(historyId) {
}
};
currentResults = displayResults;
displayResults(displayResults);
currentResults = resultsData;
displayResults(resultsData);
}
// 关闭管理对话框
@@ -1232,7 +1235,7 @@ async function loadRedisQueryLogs() {
if (result.success && result.data && result.data.length > 0) {
// 过滤Redis相关的日志
const redisLogs = result.data.filter(log =>
log.message.toLowerCase().includes('redis') ||
(log.message && log.message.toLowerCase().includes('redis')) ||
log.query_type === 'redis'
);
@@ -1241,11 +1244,15 @@ async function loadRedisQueryLogs() {
redisLogs.forEach(log => {
const levelClass = log.level === 'ERROR' ? 'text-danger' :
log.level === 'WARNING' ? 'text-warning' : 'text-info';
const timestamp = log.timestamp || '未知时间';
const level = log.level || 'INFO';
const message = log.message || '无消息内容';
html += `
<div class="mb-2">
<span class="text-muted">[${log.timestamp}]</span>
<span class="badge bg-secondary">${log.level}</span>
<span class="${levelClass}">${log.message}</span>
<span class="text-muted">[${timestamp}]</span>
<span class="badge bg-secondary">${level}</span>
<span class="${levelClass}">${message}</span>
</div>
`;
});