Improve query history records

Date: 2025-08-02 22:47:39 +08:00
parent 9cfc363227
commit 36915c45ea
2 changed files with 177 additions and 9 deletions

app.py

@@ -319,6 +319,25 @@ def ensure_database():
             conn.commit()
             logger.info("query_history表query_type字段添加成功")
 
+        # 添加查询结果数据存储字段
+        if 'raw_results' not in history_column_names:
+            logger.info("添加raw_results字段到query_history表...")
+            cursor.execute("ALTER TABLE query_history ADD COLUMN raw_results TEXT")
+            conn.commit()
+            logger.info("query_history表raw_results字段添加成功")
+
+        if 'differences_data' not in history_column_names:
+            logger.info("添加differences_data字段到query_history表...")
+            cursor.execute("ALTER TABLE query_history ADD COLUMN differences_data TEXT")
+            conn.commit()
+            logger.info("query_history表differences_data字段添加成功")
+
+        if 'identical_data' not in history_column_names:
+            logger.info("添加identical_data字段到query_history表...")
+            cursor.execute("ALTER TABLE query_history ADD COLUMN identical_data TEXT")
+            conn.commit()
+            logger.info("query_history表identical_data字段添加成功")
+
         conn.close()
         return True
     except Exception as e:
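The three new TEXT columns are added with the same check-then-ALTER pattern already used for query_type, so re-running ensure_database() against an up-to-date database is a no-op. A minimal standalone sketch of that idempotent migration step (the helper and the config.db path are illustrative, not part of the commit):

    import sqlite3
    import logging

    logger = logging.getLogger(__name__)

    def add_column_if_missing(conn, table, column, col_type="TEXT"):
        # PRAGMA table_info returns one row per existing column; index 1 is the column name.
        existing = [row[1] for row in conn.execute(f"PRAGMA table_info({table})")]
        if column not in existing:
            logger.info("adding column %s.%s", table, column)
            conn.execute(f"ALTER TABLE {table} ADD COLUMN {column} {col_type}")
            conn.commit()

    conn = sqlite3.connect("config.db")  # hypothetical path; app.py manages its own connection
    for col in ("raw_results", "differences_data", "identical_data"):
        add_column_if_missing(conn, "query_history", col)
    conn.close()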
@@ -672,8 +691,8 @@ def delete_config_group(group_id):
 def save_query_history(name, description, pro_config, test_config, query_config, query_keys,
                        results_summary, execution_time, total_keys, differences_count, identical_count,
-                       sharding_config=None, query_type='single'):
-    """保存查询历史记录,支持分表查询"""
+                       sharding_config=None, query_type='single', raw_results=None, differences_data=None, identical_data=None):
+    """保存查询历史记录,支持分表查询和查询结果数据"""
     if not ensure_database():
         logger.error("数据库初始化失败")
         return False
@@ -686,8 +705,8 @@ def save_query_history(name, description, pro_config, test_config, query_config,
             INSERT INTO query_history
             (name, description, pro_config, test_config, query_config, query_keys,
              results_summary, execution_time, total_keys, differences_count, identical_count,
-             sharding_config, query_type)
-            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
+             sharding_config, query_type, raw_results, differences_data, identical_data)
+            VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?)
         ''', (
             name, description,
             json.dumps(pro_config),
@@ -700,7 +719,10 @@ def save_query_history(name, description, pro_config, test_config, query_config,
             differences_count,
             identical_count,
             json.dumps(sharding_config) if sharding_config else None,
-            query_type
+            query_type,
+            json.dumps(raw_results) if raw_results else None,
+            json.dumps(differences_data) if differences_data else None,
+            json.dumps(identical_data) if identical_data else None
         ))
         conn.commit()
         logger.info(f"查询历史记录 '{name}' 保存成功,查询类型:{query_type}")
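With the widened signature, callers hand over the comparison payloads as plain Python structures and save_query_history serializes them with json.dumps before binding the three new placeholders. A hedged usage sketch (all argument values below are placeholders; the shape of a difference record beyond the 'field' key is assumed):

    save_query_history(
        name="单表查询_20250802_224739",
        description="自动保存 - 示例",
        pro_config={"host": "pro-db", "port": 3306},
        test_config={"host": "test-db", "port": 3306},
        query_config={"keys": ["id"], "fields_to_compare": [], "exclude_fields": []},
        query_keys=["1001", "1002"],
        results_summary={"total": 2},
        execution_time=0.0,
        total_keys=2,
        differences_count=1,
        identical_count=1,
        sharding_config=None,
        query_type="single",
        # The three new arguments must be JSON-serializable; they end up in the new TEXT columns.
        raw_results={"raw_pro_data": [{"id": "1001"}], "raw_test_data": [{"id": "1001"}]},
        differences_data=[{"key": "1002", "field": "status"}],  # 'field' is what the results endpoint counts
        identical_data=[{"key": "1001"}],
    )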
@@ -731,6 +753,8 @@ def get_query_history():
         history_list = []
         for row in rows:
+            # 获取列名列表以检查字段是否存在
+            column_names = [desc[0] for desc in cursor.description]
             history_list.append({
                 'id': row['id'],
                 'name': row['name'],
@@ -740,7 +764,7 @@ def get_query_history():
                 'differences_count': row['differences_count'],
                 'identical_count': row['identical_count'],
                 'created_at': row['created_at'],
-                'query_type': row.get('query_type', 'single')
+                'query_type': row['query_type'] if 'query_type' in column_names else 'single'
             })
 
         return history_list
@@ -766,6 +790,8 @@ def get_query_history_by_id(history_id):
         row = cursor.fetchone()
 
         if row:
+            # 获取列名列表以检查字段是否存在
+            column_names = [desc[0] for desc in cursor.description]
             return {
                 'id': row['id'],
                 'name': row['name'],
@@ -781,8 +807,12 @@ def get_query_history_by_id(history_id):
                 'identical_count': row['identical_count'],
                 'created_at': row['created_at'],
                 # 处理新字段,保持向后兼容
-                'sharding_config': json.loads(row['sharding_config']) if row.get('sharding_config') else None,
-                'query_type': row.get('query_type', 'single')
+                'sharding_config': json.loads(row['sharding_config']) if 'sharding_config' in column_names and row['sharding_config'] else None,
+                'query_type': row['query_type'] if 'query_type' in column_names else 'single',
+                # 添加查询结果数据支持
+                'raw_results': json.loads(row['raw_results']) if 'raw_results' in column_names and row['raw_results'] else None,
+                'differences_data': json.loads(row['differences_data']) if 'differences_data' in column_names and row['differences_data'] else None,
+                'identical_data': json.loads(row['identical_data']) if 'identical_data' in column_names and row['identical_data'] else None
             }
         return None
     except Exception as e:
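The switch from row.get(...) to a cursor.description lookup is needed because sqlite3.Row supports key access but has no .get() method, so optional columns have to be detected from the cursor metadata instead. A small self-contained illustration of the pattern against a pre-migration schema (in-memory database, reduced table):

    import sqlite3

    conn = sqlite3.connect(":memory:")
    conn.row_factory = sqlite3.Row
    # Old schema: query_type and the new result columns do not exist yet.
    conn.execute("CREATE TABLE query_history (id INTEGER PRIMARY KEY, name TEXT)")
    conn.execute("INSERT INTO query_history (name) VALUES ('demo')")

    cursor = conn.execute("SELECT * FROM query_history WHERE id = 1")
    row = cursor.fetchone()

    # cursor.description lists the columns actually present in this result set.
    column_names = [desc[0] for desc in cursor.description]
    query_type = row['query_type'] if 'query_type' in column_names else 'single'
    print(query_type)  # -> 'single', because the column is missing on old databases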
@@ -1394,6 +1424,46 @@ def sharding_query_compare():
         logger.info(f"分表比对完成:发现 {len(differences)} 处差异")
 
+        # 自动保存分表查询历史记录
+        try:
+            # 生成历史记录名称
+            timestamp = datetime.now().strftime("%Y%m%d_%H%M%S")
+            history_name = f"分表查询_{timestamp}"
+            history_description = f"自动保存 - 分表查询{len(values)}个Key发现{len(differences)}处差异"
+
+            # 保存历史记录
+            save_query_history(
+                name=history_name,
+                description=history_description,
+                pro_config=pro_config,
+                test_config=test_config,
+                query_config={
+                    'keys': keys,
+                    'fields_to_compare': fields_to_compare,
+                    'exclude_fields': exclude_fields
+                },
+                query_keys=values,
+                results_summary=summary,
+                execution_time=0.0,  # 可以后续优化计算实际执行时间
+                total_keys=len(values),
+                differences_count=len(differences),
+                identical_count=len(identical_results),
+                # 新增分表相关参数
+                sharding_config=sharding_config,
+                query_type='sharding',
+                # 添加查询结果数据
+                raw_results={
+                    'raw_pro_data': [dict(row._asdict()) for row in pro_data] if pro_data else [],
+                    'raw_test_data': [dict(row._asdict()) for row in test_data] if test_data else [],
+                    'sharding_info': sharding_info  # 包含分表信息
+                },
+                differences_data=differences,
+                identical_data=identical_results
+            )
+            logger.info(f"分表查询历史记录保存成功: {history_name}")
+        except Exception as e:
+            logger.warning(f"保存分表查询历史记录失败: {e}")
+
         # 结束查询批次
         query_log_collector.end_current_batch()
 
         return jsonify(result)
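Storing raw_pro_data/raw_test_data requires each result row to become a JSON-friendly dict first, which the auto-save does with dict(row._asdict()). A minimal sketch of that conversion, assuming namedtuple-style rows (the real row type depends on the driver the comparison queries use):

    import json
    from collections import namedtuple

    # Stand-in for a driver result row; _asdict() exists on namedtuple-like rows.
    Row = namedtuple("Row", ["id", "name", "status"])
    pro_data = [Row(1, "item-a", "ACTIVE"), Row(2, "item-b", "DISABLED")]
    test_data = []

    raw_results = {
        'raw_pro_data': [dict(row._asdict()) for row in pro_data] if pro_data else [],
        'raw_test_data': [dict(row._asdict()) for row in test_data] if test_data else [],
    }
    # save_query_history then applies json.dumps before writing the TEXT column.
    print(json.dumps(raw_results, ensure_ascii=False))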
@@ -1529,7 +1599,14 @@ def query_compare():
                 execution_time=0.0,  # 可以后续优化计算实际执行时间
                 total_keys=len(values),
                 differences_count=len(differences),
-                identical_count=len(identical_results)
+                identical_count=len(identical_results),
+                # 添加查询结果数据
+                raw_results={
+                    'raw_pro_data': [dict(row._asdict()) for row in pro_data] if pro_data else [],
+                    'raw_test_data': [dict(row._asdict()) for row in test_data] if test_data else []
+                },
+                differences_data=differences,
+                identical_data=identical_results
             )
         except Exception as e:
             logger.warning(f"保存查询历史记录失败: {e}")
@@ -1691,6 +1768,56 @@ def api_get_query_history_detail(history_id):
     else:
         return jsonify({'success': False, 'error': '查询历史记录不存在'}), 404
 
+@app.route('/api/query-history/<int:history_id>/results', methods=['GET'])
+def api_get_query_history_results(history_id):
+    """获取查询历史记录的完整结果数据"""
+    try:
+        history_record = get_query_history_by_id(history_id)
+        if not history_record:
+            return jsonify({'success': False, 'error': '历史记录不存在'}), 404
+
+        # 构建完整的查询结果格式与API查询结果保持一致
+        result = {
+            'total_keys': history_record['total_keys'],
+            'pro_count': len(history_record.get('raw_results', {}).get('raw_pro_data', [])) if history_record.get('raw_results') else 0,
+            'test_count': len(history_record.get('raw_results', {}).get('raw_test_data', [])) if history_record.get('raw_results') else 0,
+            'differences': history_record.get('differences_data', []),
+            'identical_results': history_record.get('identical_data', []),
+            'field_diff_count': {},  # 可以从differences_data中重新计算
+            'summary': history_record.get('results_summary', {}),
+            'raw_pro_data': history_record.get('raw_results', {}).get('raw_pro_data', []) if history_record.get('raw_results') else [],
+            'raw_test_data': history_record.get('raw_results', {}).get('raw_test_data', []) if history_record.get('raw_results') else [],
+            # 如果是分表查询,添加分表信息
+            'sharding_info': history_record.get('raw_results', {}).get('sharding_info') if history_record.get('raw_results') and history_record.get('query_type') == 'sharding' else None,
+            # 添加历史记录元信息
+            'history_info': {
+                'id': history_record['id'],
+                'name': history_record['name'],
+                'description': history_record['description'],
+                'created_at': history_record['created_at'],
+                'query_type': history_record.get('query_type', 'single')
+            }
+        }
+
+        # 重新计算field_diff_count
+        if history_record.get('differences_data'):
+            field_diff_count = {}
+            for diff in history_record['differences_data']:
+                if 'field' in diff:
+                    field_name = diff['field']
+                    field_diff_count[field_name] = field_diff_count.get(field_name, 0) + 1
+            result['field_diff_count'] = field_diff_count
+
+        return jsonify({
+            'success': True,
+            'data': result,
+            'message': f'历史记录 "{history_record["name"]}" 结果加载成功'
+        })
+
+    except Exception as e:
+        logger.error(f"获取查询历史记录结果失败: {e}")
+        return jsonify({'success': False, 'error': f'获取历史记录结果失败: {str(e)}'}), 500
+
 @app.route('/api/query-history/<int:history_id>', methods=['DELETE'])
 def api_delete_query_history(history_id):
     """删除查询历史记录"""

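The new /results endpoint returns the stored data in the same shape as a live comparison, which is what lets the frontend changes below feed it straight into displayResults(). A hedged client sketch in Python (host, port and history id are placeholders):

    import requests

    history_id = 1  # placeholder
    resp = requests.get(f"http://127.0.0.1:5000/api/query-history/{history_id}/results")
    payload = resp.json()

    if payload.get("success"):
        data = payload["data"]
        # Same keys as a live query result: differences, identical_results, raw data, history_info, ...
        print(data["history_info"]["name"], data["history_info"]["query_type"])
        print(len(data["differences"]), "differences,", len(data["identical_results"]), "identical")
    else:
        print("error:", payload.get("error"))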

@@ -2446,6 +2446,9 @@ async function showQueryHistoryDialog() {
                     <button class="btn btn-sm btn-info me-1" onclick="viewHistoryDetail(${history.id})" title="查看详情">
                         <i class="fas fa-eye"></i>
                     </button>
+                    <button class="btn btn-sm btn-success me-1" onclick="loadHistoryResults(${history.id})" title="查看结果">
+                        <i class="fas fa-chart-bar"></i>
+                    </button>
                     <button class="btn btn-sm btn-danger" onclick="deleteHistoryRecord(${history.id}, '${history.name}')" title="删除">
                         <i class="fas fa-trash"></i>
                     </button>
@@ -2723,6 +2726,44 @@ async function viewHistoryDetail(historyId) {
     }
 }
 
+// 加载历史记录结果
+async function loadHistoryResults(historyId) {
+    try {
+        const response = await fetch(`/api/query-history/${historyId}/results`);
+        const result = await response.json();
+
+        if (result.success) {
+            // 设置当前结果数据
+            currentResults = result.data;
+
+            // 根据查询类型设置分表模式
+            if (result.data.history_info.query_type === 'sharding') {
+                isShardingMode = true;
+                document.getElementById('enableSharding').checked = true;
+                toggleShardingMode();
+            } else {
+                isShardingMode = false;
+                document.getElementById('enableSharding').checked = false;
+                toggleShardingMode();
+            }
+
+            // 显示结果
+            displayResults(result.data);
+
+            // 关闭历史记录modal
+            const modal = bootstrap.Modal.getInstance(document.getElementById('queryHistoryModal'));
+            modal.hide();
+
+            const queryTypeDesc = result.data.history_info.query_type === 'sharding' ? '分表查询' : '单表查询';
+            showAlert('success', `${queryTypeDesc}历史记录结果 "${result.data.history_info.name}" 加载成功`);
+        } else {
+            showAlert('danger', result.error || '加载历史记录结果失败');
+        }
+    } catch (error) {
+        showAlert('danger', '加载历史记录结果失败: ' + error.message);
+    }
+}
+
 // 删除历史记录
 async function deleteHistoryRecord(historyId, historyName) {
     if (!confirm(`确定要删除历史记录 "${historyName}" 吗?此操作不可撤销。`)) {