Fix Redis

2025-08-04 21:55:35 +08:00
parent dbf4255aea
commit 07467d27ae
4 changed files with 1275 additions and 617 deletions

app.py

@@ -52,6 +52,6 @@ if __name__ == '__main__':
logger.info("=== BigDataTool 启动 ===")
logger.info("应用架构:模块化")
logger.info("支持功能:单表查询、分表查询、多主键查询、配置管理、查询历史")
logger.info("访问地址http://localhost:5001")
logger.info("访问地址http://localhost:5000")
logger.info("API文档/api/* 路径下的所有端点")
app.run(debug=True, port=5001)
app.run(debug=True, port=5000)
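Assuming port 5000 is the new setting (the second of each paired line above), a quick, hedged check from the standard library can confirm that something is listening there once the app is started locally; any HTTP response, even an error page, is enough for this purpose.

import urllib.error
import urllib.request

# Probe the new address; an error status still proves the port is being served.
try:
    with urllib.request.urlopen("http://localhost:5000/") as resp:
        print(resp.status)
except urllib.error.HTTPError as exc:
    print(exc.code)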


@@ -51,6 +51,10 @@ def setup_routes(app, query_log_collector):
    def redis_compare():
        return render_template('redis_compare.html')

    @app.route('/redis-js-test')
    def redis_js_test():
        return render_template('redis_js_test.html')

    @app.route('/redis-test')
    def redis_test():
        return render_template('redis_test.html')
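For completeness, a hedged smoke-test sketch for the new template routes. The import path modules.routes and the stub collector are assumptions for illustration only; the stub exposes just the collector methods used elsewhere in this diff.

from flask import Flask

from modules.routes import setup_routes  # hypothetical import path


class StubCollector:
    """Minimal stand-in exposing only the methods this diff relies on."""

    def get_logs(self, limit=100):
        return []

    def get_logs_by_history_id(self, history_id):
        return []

    def clear_logs(self):
        return None

    def end_current_batch(self):
        return None


app = Flask(__name__)
setup_routes(app, StubCollector())

with app.test_client() as client:
    for path in ('/redis-js-test', '/redis-test'):
        # Missing templates would show up as a 500 here; 200 means the page renders.
        print(path, client.get(path).status_code)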
@@ -783,6 +787,62 @@ def setup_routes(app, query_log_collector):
logger.info(f"Redis比较完成")
logger.info(f"比较统计: 总计{result['stats']['total_keys']}个key相同{result['stats']['identical_count']}个,不同{result['stats']['different_count']}")
# 增强结果,添加原生数据信息
enhanced_result = result.copy()
enhanced_result['raw_data'] = {
'cluster1_data': [],
'cluster2_data': []
}
# 从比较结果中提取原生数据信息
for item in result.get('identical_results', []):
if 'key' in item and 'value' in item:
enhanced_result['raw_data']['cluster1_data'].append({
'key': item['key'],
'value': item['value'],
'type': 'identical'
})
enhanced_result['raw_data']['cluster2_data'].append({
'key': item['key'],
'value': item['value'],
'type': 'identical'
})
for item in result.get('different_results', []):
if 'key' in item:
if 'cluster1_value' in item:
enhanced_result['raw_data']['cluster1_data'].append({
'key': item['key'],
'value': item['cluster1_value'],
'type': 'different'
})
if 'cluster2_value' in item:
enhanced_result['raw_data']['cluster2_data'].append({
'key': item['key'],
'value': item['cluster2_value'],
'type': 'different'
})
for item in result.get('missing_results', []):
if 'key' in item:
if 'cluster1_value' in item and item['cluster1_value'] is not None:
enhanced_result['raw_data']['cluster1_data'].append({
'key': item['key'],
'value': item['cluster1_value'],
'type': 'missing'
})
if 'cluster2_value' in item and item['cluster2_value'] is not None:
enhanced_result['raw_data']['cluster2_data'].append({
'key': item['key'],
'value': item['cluster2_value'],
'type': 'missing'
})
logger.info(f"原生数据统计: 集群1={len(enhanced_result['raw_data']['cluster1_data'])}条, 集群2={len(enhanced_result['raw_data']['cluster2_data'])}")
# 使用增强结果进行后续处理
result = enhanced_result
# 自动保存Redis查询历史记录
try:
# 生成历史记录名称
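The raw-data extraction above repeats the same append pattern for each result category. As a hedged refactoring sketch (not part of this commit), the same collection could live in a small helper; key names follow the code above, and `result` is the comparison result dict produced earlier in this handler.

def build_raw_data(result):
    """Collect per-cluster raw key/value rows from a comparison result."""
    raw = {'cluster1_data': [], 'cluster2_data': []}

    def add(cluster, key, value, row_type):
        raw[f'{cluster}_data'].append({'key': key, 'value': value, 'type': row_type})

    for item in result.get('identical_results', []):
        if 'key' in item and 'value' in item:
            add('cluster1', item['key'], item['value'], 'identical')
            add('cluster2', item['key'], item['value'], 'identical')

    for item in result.get('different_results', []):
        if 'key' in item:
            if 'cluster1_value' in item:
                add('cluster1', item['key'], item['cluster1_value'], 'different')
            if 'cluster2_value' in item:
                add('cluster2', item['key'], item['cluster2_value'], 'different')

    for item in result.get('missing_results', []):
        if 'key' in item:
            if item.get('cluster1_value') is not None:
                add('cluster1', item['key'], item['cluster1_value'], 'missing')
            if item.get('cluster2_value') is not None:
                add('cluster2', item['key'], item['cluster2_value'], 'missing')

    return raw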
@@ -790,29 +850,42 @@ def setup_routes(app, query_log_collector):
history_name = f"Redis比较_{timestamp}"
history_description = f"自动保存 - Redis比较{result['stats']['total_keys']}个Key发现{result['stats']['different_count']}处差异"
# 保存历史记录
history_id = save_query_history(
# 计算查询键值列表
query_keys = []
if query_options.get('mode') == 'specified':
query_keys = query_options.get('keys', [])
elif query_options.get('mode') == 'random':
# 对于随机模式,从结果中提取实际查询的键
for item in result.get('identical_results', []):
if 'key' in item:
query_keys.append(item['key'])
for item in result.get('different_results', []):
if 'key' in item:
query_keys.append(item['key'])
for item in result.get('missing_results', []):
if 'key' in item:
query_keys.append(item['key'])
# 保存Redis查询历史记录
history_id = save_redis_query_history(
name=history_name,
description=history_description,
pro_config=cluster1_config,
test_config=cluster2_config,
query_config=query_options,
query_keys=result.get('query_options', {}).get('keys', []),
cluster1_config=cluster1_config,
cluster2_config=cluster2_config,
query_options=query_options,
query_keys=query_keys,
results_summary=result['stats'],
execution_time=result['performance_report']['total_time'],
total_keys=result['stats']['total_keys'],
differences_count=result['stats']['different_count'],
different_count=result['stats']['different_count'],
identical_count=result['stats']['identical_count'],
query_type='redis',
# 添加查询结果数据
missing_count=result['stats']['missing_in_cluster1'] + result['stats']['missing_in_cluster2'] + result['stats']['both_missing'],
raw_results={
'identical_results': result['identical_results'],
'different_results': result['different_results'],
'missing_results': result['missing_results'],
'performance_report': result['performance_report']
},
differences_data=result['different_results'],
identical_data=result['identical_results']
}
)
# 关联查询日志与历史记录
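Because removed and added lines are interleaved in the rendered hunk above, it helps to spell out what the new call appears to pass. The following is a hedged reconstruction of the save_redis_query_history interface implied by that call site, not the function's actual definition, which lives elsewhere in the project and may differ.

def save_redis_query_history(name, description, cluster1_config, cluster2_config,
                             query_options, query_keys, results_summary,
                             execution_time, total_keys, different_count,
                             identical_count, missing_count, raw_results):
    """Persist one Redis comparison run and return its history record id."""
    raise NotImplementedError  # placeholder; see the project's own implementation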
@@ -824,6 +897,8 @@ def setup_routes(app, query_log_collector):
        except Exception as e:
            logger.warning(f"Failed to save the Redis query history record: {e}")
            import traceback
            logger.error(f"Detailed error: {traceback.format_exc()}")

        # End the current query batch
        query_log_collector.end_current_batch()
@@ -1021,4 +1096,44 @@ def setup_routes(app, query_log_collector):
        if success:
            return jsonify({'success': True, 'message': 'Redis query history record deleted'})
        else:
            return jsonify({'success': False, 'error': 'Failed to delete Redis query history record'}), 500
            return jsonify({'success': False, 'error': 'Failed to delete Redis query history record'}), 500

    # Redis query log API
    @app.route('/api/redis/query-logs', methods=['GET'])
    def api_get_redis_query_logs():
        """Fetch Redis query logs."""
        try:
            limit = request.args.get('limit', 100, type=int)

            # Fetch the most recent query logs
            logs = query_log_collector.get_logs(limit=limit)

            # Keep only Redis-related logs
            redis_logs = []
            for log in logs:
                if (log.get('message') and 'redis' in log.get('message', '').lower()) or log.get('query_type') == 'redis':
                    redis_logs.append(log)

            return jsonify({'success': True, 'data': redis_logs})
        except Exception as e:
            logger.error(f"Failed to fetch Redis query logs: {e}")
            return jsonify({'success': False, 'error': str(e)}), 500

    @app.route('/api/redis/query-logs/history/<int:history_id>', methods=['GET'])
    def api_get_redis_query_logs_by_history(history_id):
        """Fetch the Redis query logs for a specific history record."""
        try:
            logs = query_log_collector.get_logs_by_history_id(history_id)
            return jsonify({'success': True, 'data': logs})
        except Exception as e:
            logger.error(f"Failed to fetch Redis query logs for history record {history_id}: {e}")
            return jsonify({'success': False, 'error': str(e)}), 500

    @app.route('/api/redis/query-logs', methods=['DELETE'])
    def api_clear_redis_query_logs():
        """Clear all Redis query logs."""
        try:
            query_log_collector.clear_logs()
            return jsonify({'success': True, 'message': 'Redis query logs cleared'})
        except Exception as e:
            logger.error(f"Failed to clear Redis query logs: {e}")
            return jsonify({'success': False, 'error': str(e)}), 500
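A hedged usage sketch for the three new query-log endpoints, assuming the app is running locally on port 5000 as configured in app.py above; the history id 1 is purely illustrative.

import json
import urllib.request

BASE = "http://localhost:5000"

# GET the latest Redis-related query logs (limit is an optional query parameter).
with urllib.request.urlopen(f"{BASE}/api/redis/query-logs?limit=20") as resp:
    print(json.load(resp)["success"])

# GET the logs attached to one history record.
with urllib.request.urlopen(f"{BASE}/api/redis/query-logs/history/1") as resp:
    print(len(json.load(resp)["data"]))

# DELETE to clear the Redis query logs.
req = urllib.request.Request(f"{BASE}/api/redis/query-logs", method="DELETE")
with urllib.request.urlopen(req) as resp:
    print(json.load(resp)["message"])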

The diffs of the two remaining files were suppressed because they are too large.