feat(export): 添加字段去重逻辑以优化导出功能

在 exports.go 中实现字段去重,移除完全重复的字段,并确保主表字段优先于副表字段。更新 toString 函数以支持更多数据类型的格式化,确保导出数据的准确性和一致性。
This commit is contained in:
zhouyonggao 2025-12-15 12:03:59 +08:00
parent d17ae70ea9
commit a84e8a5a66
4 changed files with 155 additions and 1695 deletions

View File

@ -242,6 +242,64 @@ func (a *ExportsAPI) create(w http.ResponseWriter, r *http.Request) {
if len(bad) > 0 {
logging.JSON("ERROR", map[string]interface{}{"event": "fields_not_whitelisted", "removed": bad})
}
// 字段去重:移除完全重复的字段(包括主表自身的重复)
{
seen := make(map[string]bool)
deduped := make([]string, 0, len(filtered))
removed := []string{}
for _, tf := range filtered {
if seen[tf] {
removed = append(removed, tf)
continue
}
seen[tf] = true
deduped = append(deduped, tf)
}
if len(removed) > 0 {
logging.JSON("INFO", map[string]interface{}{"event": "fields_deduplicated_exact", "removed": removed, "reason": "移除完全重复的字段"})
}
filtered = deduped
}
// 主表和副表相同字段去重:以主表为主,移除副表的重复字段
if ds == "ymt" && (main == "order" || main == "order_info") {
mainTableFields := make(map[string]bool)
// 先收集主表的所有字段名
for _, tf := range filtered {
parts := strings.Split(tf, ".")
if len(parts) == 2 && parts[0] == "order" {
mainTableFields[parts[1]] = true
}
}
if len(mainTableFields) > 0 {
deduped := make([]string, 0, len(filtered))
removed := []string{}
for _, tf := range filtered {
parts := strings.Split(tf, ".")
if len(parts) == 2 {
if parts[0] == "order" {
// 主表字段,保留
deduped = append(deduped, tf)
} else {
// 副表字段,检查是否与主表字段重复
if mainTableFields[parts[1]] {
// 字段名重复,移除副表字段
removed = append(removed, tf)
continue
}
// 字段名不重复,保留
deduped = append(deduped, tf)
}
} else {
// 格式不正确,保留原样
deduped = append(deduped, tf)
}
}
if len(removed) > 0 {
logging.JSON("INFO", map[string]interface{}{"event": "fields_deduplicated", "removed": removed, "reason": "主表和副表存在相同字段,以主表为主"})
}
filtered = deduped
}
}
// 字段匹配校验(数量与顺序)
if len(filtered) != len(fs) {
logging.JSON("ERROR", map[string]interface{}{"event": "field_count_mismatch", "template_count": len(fs), "final_count": len(filtered)})
@ -280,6 +338,7 @@ func (a *ExportsAPI) create(w http.ResponseWriter, r *http.Request) {
hdrs[i] = tf
}
}
// 列头去重:如果仍有重复的列头(中文标签),对非主表字段添加前缀
{
cnt := map[string]int{}
for _, h := range hdrs {
@ -1027,10 +1086,28 @@ func toString(v interface{}) string {
return t
case int64:
return strconv.FormatInt(t, 10)
case int32:
return strconv.FormatInt(int64(t), 10)
case int:
return strconv.Itoa(t)
case uint64:
return strconv.FormatUint(t, 10)
case uint32:
return strconv.FormatUint(uint64(t), 10)
case uint:
return strconv.FormatUint(uint64(t), 10)
case float64:
// 对于整数部分,使用整数格式;对于小数部分,保留必要精度
if t == float64(int64(t)) {
return strconv.FormatInt(int64(t), 10)
}
return strconv.FormatFloat(t, 'f', -1, 64)
case float32:
// 对于整数部分,使用整数格式;对于小数部分,保留必要精度
if t == float32(int64(t)) {
return strconv.FormatInt(int64(t), 10)
}
return strconv.FormatFloat(float64(t), 'f', -1, 32)
case bool:
if t {
return "1"
@ -1038,8 +1115,14 @@ func toString(v interface{}) string {
return "0"
case time.Time:
return t.Format("2006-01-02 15:04:05")
case nil:
return ""
default:
return fmt.Sprintf("%v", t)
// 尝试转换为字符串,如果是数字类型则格式化
if s := fmt.Sprintf("%v", t); s != "" {
return s
}
return ""
}
}
func renderSQL(q string, args []interface{}) string {

View File

@ -516,12 +516,32 @@ func toString(v interface{}) string {
return t
case int64:
return strconv.FormatInt(t, 10)
case int32:
return strconv.FormatInt(int64(t), 10)
case int:
return strconv.Itoa(t)
case uint64:
return strconv.FormatUint(t, 10)
case uint32:
return strconv.FormatUint(uint64(t), 10)
case uint:
return strconv.FormatUint(uint64(t), 10)
case float64:
// 对于整数部分,使用整数格式;对于小数部分,保留必要精度
if t == float64(int64(t)) {
return strconv.FormatInt(int64(t), 10)
}
return strconv.FormatFloat(t, 'f', -1, 64)
case float32:
// 对于整数部分,使用整数格式;对于小数部分,保留必要精度
if t == float32(int64(t)) {
return strconv.FormatInt(int64(t), 10)
}
return strconv.FormatFloat(float64(t), 'f', -1, 32)
case time.Time:
return t.Format("2006-01-02 15:04:05")
case nil:
return ""
default:
return ""
}

Binary file not shown.

File diff suppressed because one or more lines are too long