营销系统导出

This commit is contained in:
renzhiyuan 2026-01-14 15:34:41 +08:00
commit 1000d3b78d
54 changed files with 4195 additions and 0 deletions

15
.gitignore vendored Normal file
View File

@ -0,0 +1,15 @@
# Binaries for programs and plugins
*.exe
*.exe~
*.dll
*.so
*.dylib
# Test binary, built with `go test -c`
*.test
# Output of the go coverage tool, specifically when used with LiteIDE
*.out
# Dependency directories (remove the comment below to include it)
# vendor/

8
.idea/.gitignore vendored Normal file
View File

@ -0,0 +1,8 @@
# Default ignored files
/shelf/
/workspace.xml
# Editor-based HTTP Client requests
/httpRequests/
# Datasource local storage ignored files
/dataSources/
/dataSources.local.xml

14
.idea/deployment.xml Normal file
View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="PublishConfigData" serverName="11" remoteFilesAllowedToDisappearOnAutoupload="false">
<serverData>
<paths name="11">
<serverdata>
<mappings>
<mapping local="$PROJECT_DIR$" web="/" />
</mappings>
</serverdata>
</paths>
</serverData>
</component>
</project>

13
.idea/excel-export.iml Normal file
View File

@ -0,0 +1,13 @@
<?xml version="1.0" encoding="UTF-8"?>
<module type="WEB_MODULE" version="4">
<component name="Go" enabled="true">
<buildTags>
<option name="os" value="linux" />
</buildTags>
</component>
<component name="NewModuleRootManager">
<content url="file://$MODULE_DIR$" />
<orderEntry type="inheritedJdk" />
<orderEntry type="sourceFolder" forTests="false" />
</component>
</module>

8
.idea/modules.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="ProjectModuleManager">
<modules>
<module fileurl="file://$PROJECT_DIR$/.idea/excel-export.iml" filepath="$PROJECT_DIR$/.idea/excel-export.iml" />
</modules>
</component>
</project>

8
.idea/sshConfigs.xml Normal file
View File

@ -0,0 +1,8 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="SshConfigs">
<configs>
<sshConfig authType="PASSWORD" host="192.168.6.75" id="cd4edbbd-31a6-425a-b6d5-71c1d2ed58bc" port="22" nameFormat="DESCRIPTIVE" username="root" useOpenSSHConfig="true" />
</configs>
</component>
</project>

6
.idea/vcs.xml Normal file
View File

@ -0,0 +1,6 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="VcsDirectoryMappings">
<mapping directory="" vcs="Git" />
</component>
</project>

14
.idea/webServers.xml Normal file
View File

@ -0,0 +1,14 @@
<?xml version="1.0" encoding="UTF-8"?>
<project version="4">
<component name="WebServers">
<option name="servers">
<webServer id="e87b4886-d506-4bd6-976f-23d0d64c0017" name="11">
<fileTransfer accessType="SFTP" host="192.168.6.75" port="22" sshConfigId="cd4edbbd-31a6-425a-b6d5-71c1d2ed58bc" sshConfig="root@192.168.6.75:22 password">
<advancedOptions>
<advancedOptions dataProtectionLevel="Private" keepAliveTimeout="0" passiveMode="true" shareSSLContext="true" />
</advancedOptions>
</fileTransfer>
</webServer>
</option>
</component>
</project>

21
Dockerfile Normal file
View File

@ -0,0 +1,21 @@
# Official golang base image
FROM golang:1.21.4
WORKDIR /src
# Bring the sources into the image at build time; the original relied only on
# a runtime VOLUME, which is empty during `docker build`, so `go mod tidy`
# had nothing to work on.
COPY . .
VOLUME /src
# Install project dependencies
RUN go mod tidy
# Build the project (the original had a bare `RUN` with no command, which is
# invalid Dockerfile syntax)
RUN go build -o /usr/local/bin/excel-export .
# Expose the service port
EXPOSE 3030
# Environment variables
ENV PORT=3030

238
biz/config/config.go Normal file
View File

@ -0,0 +1,238 @@
package config
import (
"errors"
"fmt"
"github.com/flytam/filenamify"
"github.com/spf13/viper"
"io/fs"
"log"
"path/filepath"
"regexp"
"strconv"
"strings"
"time"
)
// DefaultConfig holds the most recently loaded configuration; it is
// overwritten by LoadConfig.
// NOTE(review): mutable package-level state — concurrent use with
// LoadConfig would need external synchronization.
var DefaultConfig = &Config{}

type (
	// Config is the root of the YAML configuration file (config.yaml).
	Config struct {
		Systems []System          `json:"System" mapstructure:"System"`
		Servers map[string]Server `json:"Servers" mapstructure:"Servers"`
	}
	// System groups the export jobs of one business system.
	System struct {
		Name string // display name used to look the system up
		Db   string // DSN of the system's database
		Jobs []Job
	}
	// Server describes an auxiliary data source and the query to run on it.
	Server struct {
		Db  string
		Sql string
	}
	// Job is one export task group: a base query plus per-task settings.
	Job struct {
		Name  string
		Base  string // base SQL the job starts from
		Excel string // prefix used when naming the generated workbook
		Tasks []Task
		File  string // output file name template, may contain {placeholders}
		Size  int    // maximum number of rows per file
	}
	// Task is a single query/export step inside a Job.
	Task struct {
		PK        string `mapstructure:"pk"`
		Sql       string
		Timestamp bool // when true, time placeholders render as unix seconds
		Elt       string
		Order     string
		Excel     []map[string]string
	}
	// ResPage mirrors a Laravel-style paginated JSON response.
	ResPage struct {
		Page     int                      `json:"current_page"`
		PageSize int                      `json:"per_page"`
		Total    int                      `json:"total"`
		LastPage int                      `json:"last_page"`
		Data     []map[string]interface{} `json:"data"`
	}
	// FileInfoWithModTime pairs a fs.FileInfo with an explicit mod time.
	FileInfoWithModTime struct {
		fs.FileInfo
		ModTime time.Time
	}
	// FileInfoStatus pairs a fs.FileInfo with a processing status and time.
	FileInfoStatus struct {
		fs.FileInfo
		Status int8
		Time   time.Time
	}
)
// GetSystem looks up a configured system by its display name; it returns an
// error when no system with that name exists.
func (c Config) GetSystem(name string) (System, error) {
	for i := range c.Systems {
		if c.Systems[i].Name == name {
			return c.Systems[i], nil
		}
	}
	return System{}, errors.New("没有找到相关配置:" + name)
}
// GetJob looks up one of the system's jobs by name; it returns an error
// when no job with that name is configured.
func (s System) GetJob(name string) (Job, error) {
	for i := range s.Jobs {
		if s.Jobs[i].Name == name {
			return s.Jobs[i], nil
		}
	}
	return Job{}, errors.New("没有找到相关配置:" + name)
}
// GetFileName renders the job's File template: every {field} placeholder is
// substituted from params (time values formatted as 20060102), and the base
// name is sanitized so it is safe on the local filesystem.
func (j Job) GetFileName(params map[string]interface{}) string {
	placeholder := regexp.MustCompile("({[a-zA-Z0-9]+})")
	// Substitute {field} placeholders from params; unknown fields stay as-is.
	rendered := placeholder.ReplaceAllFunc([]byte(j.File), func(match []byte) []byte {
		key := string(match[1 : len(match)-1])
		val, ok := params[key]
		if !ok {
			return match
		}
		return []byte(toString(val, "20060102"))
	})
	// Sanitize only the base name; the directory part is kept verbatim.
	dir, base := filepath.Split(string(rendered))
	safe, err := filenamify.Filenamify(base, filenamify.Options{Replacement: "-"})
	if err != nil {
		log.Printf("不安全的文件名:%s", err.Error())
	}
	return dir + safe
}
// GetSql builds the final query for the task.
//
// Starting from params["sql"] (lower-cased), it strips any existing
// " order by " clause, appends the task's Elt filter joined with either
// " where " or " and " (depending on whether the base query already contains
// a where clause), substitutes {field} placeholders from params (rendered
// per t.Timestamp), and finally appends the task's Order clause when set.
//
// NOTE(review): where/order-by detection is a plain substring search, so an
// identifier containing "where" would be misdetected — acceptable for the
// controlled queries coming from config.yaml.
func (t Task) GetSql(params map[string]interface{}) string {
	sql := strings.ToLower(params["sql"].(string))
	// Join the extra filter with WHERE unless the query already has one.
	// Both keywords carry surrounding spaces so plain concatenation stays
	// valid SQL — the previous bare "where" produced fragments like
	// "select * from orderwhere create_time ...".
	split := " where "
	if strings.Contains(sql, "where") {
		split = " and "
	}
	// Drop a pre-existing ORDER BY; the task's own Order wins.
	if i := strings.Index(sql, " order by "); i != -1 {
		sql = sql[:i]
	}
	if strings.TrimSpace(t.Elt) != "" {
		sql += split + t.Elt
		m := regexp.MustCompile("({[a-zA-Z0-9]+})")
		sql = string(m.ReplaceAllFunc([]byte(sql), func(b []byte) []byte {
			field := string(b[1 : len(b)-1])
			if val, ok := params[field]; ok {
				return []byte(toString(val, t.Timestamp))
			}
			return b
		}))
	}
	if strings.TrimSpace(t.Order) != "" {
		sql += " order by " + t.Order
	}
	return sql
}
// toString renders a placeholder value as a string. For time.Time values the
// timestamp argument controls rendering: a bool selects between unix seconds
// (true) and "2006-01-02 15:04:05" (false), a string is used directly as the
// layout, and any other type yields "". Non-time values fall back to their
// natural string form.
func toString(parm interface{}, timestamp interface{}) string {
	if tv, ok := parm.(time.Time); ok {
		switch layout := timestamp.(type) {
		case bool:
			if layout {
				return strconv.FormatInt(tv.Unix(), 10)
			}
			return tv.Format("2006-01-02 15:04:05")
		case string:
			return tv.Format(layout)
		default:
			return ""
		}
	}
	switch v := parm.(type) {
	case string:
		return v
	case int:
		return strconv.Itoa(v)
	case int32:
		return strconv.FormatInt(int64(v), 10)
	default:
		return fmt.Sprint(v)
	}
}
// LoadConfig reads config.yaml from the given directory via viper and
// unmarshals it into a Config. Any read or unmarshal failure panics — this
// runs at process start-up, so failing fast is intended. The loaded config
// is also stored in the package-level DefaultConfig.
func LoadConfig(path string) *Config {
	var c Config
	viper.AddConfigPath(path)     // directory to search for the config file
	viper.SetConfigName("config") // file name without extension
	viper.SetConfigType("yaml")   // file format
	// Attempt to read the configuration.
	if err := viper.ReadInConfig(); err != nil {
		fmt.Println("请将config.yml.example拷贝为config.yaml")
		panic(err)
	}
	//v := viper.GetViper()
	//fmt.Println(v)
	if err := viper.Unmarshal(&c); err != nil {
		panic(err)
	}
	DefaultConfig = &c
	return &c
}
// GetJob resolves the named job inside the named system and also returns
// that system's database DSN.
func GetJob(conf *Config, sys, job string) (Job, string, error) {
	system, err := conf.GetSystem(sys)
	if err != nil {
		return Job{}, "", err
	}
	task, err := system.GetJob(job)
	if err != nil {
		return Job{}, "", err
	}
	return task, system.Db, nil
}
// GetServer returns the named auxiliary server configuration, or an error
// when the name is not present in conf.Servers.
func GetServer(conf *Config, serverName string) (Server, error) {
	// Single map lookup instead of a membership test followed by a re-read.
	server, ok := conf.Servers[serverName]
	if !ok {
		return Server{}, errors.New("没有找到相关配置:" + serverName)
	}
	return server, nil
}
// GetBaseSql appends the given conditions to the job's base query.
//
// Each condition is a triple of (column expression, operator, operand). A
// []interface{} operand is rendered as a quoted comma-separated list (for
// IN (...)); any other operand is appended verbatim. Fragments are joined
// with AND.
//
// Changes from the original: a strings.Builder replaces quadratic string
// concatenation; non-string operands are rendered with %v instead of
// panicking on the type assertion; an empty list renders as "()" instead of
// the original's broken ")".
//
// NOTE(review): operands are interpolated directly into the SQL string —
// only safe because conditions come from trusted configuration, never from
// user input.
func GetBaseSql(job *Job, condition [][3]interface{}) string {
	var b strings.Builder
	b.WriteString(job.Base)
	for _, value := range condition {
		b.WriteString(" AND ")
		b.WriteString(fmt.Sprintf("%s %s ", value[0], value[1]))
		if lasts, ok := value[2].([]interface{}); ok {
			b.WriteString("(")
			for i, lastValue := range lasts {
				if i > 0 {
					b.WriteString(",")
				}
				// %v preserves the original behavior for strings and avoids
				// the previous panic on non-string list elements.
				b.WriteString(fmt.Sprintf("'%v'", lastValue))
			}
			b.WriteString(")")
		} else {
			b.WriteString(fmt.Sprintf(" %v", value[2]))
		}
	}
	return b.String()
}

33
biz/config/config_test.go Normal file
View File

@ -0,0 +1,33 @@
package config
import (
"excel_export/pkg"
"fmt"
"github.com/stretchr/testify/assert"
"os"
"testing"
)
func TestLoadConfig(t *testing.T) {
path, err := os.Getwd()
assert.Nil(t, err)
path = path + "/../../config/"
config := LoadConfig(path)
zltx, err := config.GetSystem("营销系统")
assert.Nil(t, err)
fmt.Println(t)
assert.Equal(t, "renzhiyuan:renzhiyuan@123@tcp(rr-2vcszlj05siu227f9qo.mysql.cn-chengdu.rds.aliyuncs.com:3306)/market?charset=utf8mb4&parseTime=True", zltx.Db)
order, err := zltx.GetJob("订单导出")
assert.Nil(t, err)
assert.Len(t, order.Tasks, 2)
assert.False(t, order.Tasks[0].Timestamp)
}
func TestProcess(t *testing.T) {
pkg.MissionLog("market", "order", "87987987979798", "", 0, "")
}

49
biz/config/job_test.go Normal file
View File

@ -0,0 +1,49 @@
package config
import (
"github.com/stretchr/testify/assert"
"testing"
"time"
)
func TestJob_GetFileName(t *testing.T) {
t.Run("normal", func(t *testing.T) {
j := &Job{
File: "/var/www/aa-{begin}-{end}-{task}.xls",
}
name := j.GetFileName(map[string]interface{}{
"begin": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
"end": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
"task": 1,
})
assert.Equal(t, "/var/www/aa-20230101-20230101-1.xls", name)
})
t.Run("deletion", func(t *testing.T) {
j := &Job{
File: "/var/www/aa-{begin}-{end}-{task}*.xls",
}
name := j.GetFileName(map[string]interface{}{
"begin": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
"end": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
})
assert.Equal(t, "/var/www/aa-20230101-20230101-{task}-.xls", name)
})
t.Run("err", func(t *testing.T) {
j := &Job{
File: "/var/www/aa-{begin}-{end}-{task}*.xls",
}
name := j.GetFileName(map[string]interface{}{
"begin": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
"end": time.Date(2023, 1, 1, 0, 0, 0, 0, time.UTC),
})
assert.Equal(t, "/var/www/aa-20230101-20230101-{task}-.xls", name)
})
}

35
biz/config/task_test.go Normal file
View File

@ -0,0 +1,35 @@
package config
import (
"fmt"
"github.com/stretchr/testify/assert"
"testing"
"time"
)
func TestTask_GetSql(t *testing.T) {
task := Task{
Sql: "select * from order",
Elt: "create_time between {begin} and {end} and o.order_number > {last}",
Timestamp: true,
Order: " ",
}
begin, _ := time.Parse("2006-01-02 15:04:05", "2023-01-01 00:00:00")
end := begin.Add(time.Hour * 24)
sql := task.GetSql(map[string]interface{}{"begin": begin, "end": end, "last": 0})
assert.Equal(t, fmt.Sprintf("select * from order where create_time between %d and %d and o.order_number > 0", begin.Unix(), end.Unix()), sql)
}
func TestTask_GetSql_OrderBy(t *testing.T) {
task := Task{
Sql: "select * from order",
Elt: "create_time between '{begin}' and '{end}'",
Timestamp: false,
Order: "create_time",
}
begin, _ := time.Parse("2006-01-02 15:04:05", "2023-01-01 00:00:00")
end := begin.Add(time.Hour * 24)
sql := task.GetSql(map[string]interface{}{"begin": begin, "end": end})
assert.Equal(t, fmt.Sprintf("select * from order where create_time between '%s' and '%s' order by create_time", begin.Format("2006-01-02 15:04:05"), end.Format("2006-01-02 15:04:05")), sql)
}

BIN
biz/db/cpu.prof Normal file

Binary file not shown.

169
biz/db/db.go Normal file
View File

@ -0,0 +1,169 @@
package db
import (
"context"
"database/sql"
"excel_export/biz/config"
"excel_export/biz/export"
"excel_export/pkg"
"fmt"
"gorm.io/driver/mysql"
"gorm.io/gorm"
"runtime/trace"
"strconv"
"time"
)
// Compile-time check that Db satisfies export.DataFetcher.
var _ export.DataFetcher = new(Db)

// Db adapts a gorm connection to the export.DataFetcher interface.
type Db struct {
	db *gorm.DB
}
// NewDb opens a MySQL-backed gorm connection for the given DSN and wraps it
// in a Db. Statements are logged (gorm Debug mode), as before.
func NewDb(str string) (*Db, error) {
	conn, err := gorm.Open(mysql.Open(str), &gorm.Config{
		//Logger: logger.Discard,
	})
	if err != nil {
		return nil, err
	}
	return &Db{db: conn.Debug()}, nil
}
// Fetch runs the query and materializes the full result set: column titles,
// rows as []string, and the same rows keyed by column name.
func (d *Db) Fetch(s string) (*export.Data, error) {
	// Region is opened now; End fires when Fetch returns.
	defer trace.StartRegion(context.Background(), "db.fetch").End()
	rows, err := d.db.Raw(s).Rows()
	if err != nil {
		return nil, err
	}
	defer rows.Close()
	titles, err := rows.Columns()
	if err != nil {
		return nil, err
	}
	data := getData(rows, d.db, titles)
	return &export.Data{
		Title:   titles,
		Data:    data,
		DataMap: getDataMap(titles, data),
	}, nil
}
// getDataMap re-keys each positional row by its column title so lookups by
// column name are possible alongside the positional representation.
func getDataMap(titles []string, data [][]string) []map[string]string {
	out := make([]map[string]string, len(data))
	for rowIdx, row := range data {
		m := make(map[string]string, len(row))
		for colIdx, cell := range row {
			m[titles[colIdx]] = cell
		}
		out[rowIdx] = m
	}
	return out
}
// getData drains all rows of the result set into string form, using gorm's
// ScanRows to decode each row into a map before flattening it with
// transformRow into the order given by titles.
func getData(rows *sql.Rows, db *gorm.DB, titles []string) [][]string {
	result := make([][]string, 0, 10)
	for rows.Next() {
		var row map[string]interface{}
		// NOTE(review): the ScanRows error is ignored; a failed scan yields a
		// row of "" cells instead of aborting the export — confirm intended.
		db.ScanRows(rows, &row)
		result = append(result, transformRow(titles, row))
	}
	return result
}
// transform flattens each map-shaped row into a []string ordered by titles.
func transform(titles []string, data []map[string]interface{}) [][]string {
	rows := make([][]string, len(data))
	for idx := range data {
		rows[idx] = transformRow(titles, data[idx])
	}
	return rows
}
// transformRow renders one decoded row as strings, in titles order.
// Conversion rules: time.Time uses "2006-01-02 15:04:05"; floats use
// strconv.FormatFloat with -1 precision (shortest exact decimal — %v would
// drift into scientific notation for tiny values, and %f would pad zeros);
// []byte becomes a string; nil becomes ""; anything else goes through %v.
func transformRow(titles []string, data map[string]interface{}) []string {
	out := make([]string, 0, len(titles))
	for _, title := range titles {
		var cell string
		switch v := data[title].(type) {
		case nil:
			cell = ""
		case string:
			cell = v
		case []byte:
			cell = string(v)
		case time.Time:
			cell = v.Format("2006-01-02 15:04:05")
		case int, int8, int16, int32, int64:
			cell = fmt.Sprintf("%d", v)
		case float32:
			cell = strconv.FormatFloat(float64(v), 'f', -1, 32)
		case float64:
			cell = strconv.FormatFloat(v, 'f', -1, 64)
		default:
			cell = fmt.Sprintf("%v", v)
		}
		out = append(out, cell)
	}
	return out
}
// ServerData fetches the configured query result for the named auxiliary
// server (see Config.Servers).
func ServerData(conf *config.Config, serverName string) (*export.Data, error) {
	info, err := config.GetServer(conf, serverName)
	if err != nil {
		return nil, err
	}
	conn, err := NewDb(info.Db)
	if err != nil {
		return nil, err
	}
	return conn.Fetch(info.Sql)
}
// ResellerData loads the reseller and direct-reseller tables and merges them
// into a single lookup keyed by reseller id.
func ResellerData(conf *config.Config) (map[string]map[string]string, error) {
	resellers, err := ServerData(conf, "reseller")
	if err != nil {
		return nil, err
	}
	direct, err := ServerData(conf, "direct_reseller")
	if err != nil {
		return nil, err
	}
	return MergeReseller(resellers, direct), nil
}
// MergeReseller joins reseller rows with their direct-reseller rows
// (matching reseller.direct_reseller_id against directReseller.direct_id)
// and indexes the merged rows by reseller id.
func MergeReseller(reseller *export.Data, directReseller *export.Data) map[string]map[string]string {
	byDirectID := make(map[string]map[string]string, len(directReseller.DataMap))
	for _, d := range directReseller.DataMap {
		byDirectID[d["direct_id"]] = d
	}
	merged := make(map[string]map[string]string, len(reseller.DataMap))
	for _, r := range reseller.DataMap {
		if d, ok := byDirectID[r["direct_reseller_id"]]; ok {
			r = pkg.MergeMaps(r, d)
		}
		merged[r["id"]] = r
	}
	return merged
}

28
biz/db/db_test.go Normal file
View File

@ -0,0 +1,28 @@
package db
import (
"excel_export/biz/util"
"fmt"
"github.com/stretchr/testify/assert"
"testing"
)
const test_db = "renzhiyuan:renzhiyuan@123@tcp(rr-2vcszlj05siu227f9qo.mysql.cn-chengdu.rds.aliyuncs.com:3306)/market?charset=utf8mb4&parseTime=True"
func TestDb_Fetch(t *testing.T) {
p := util.NewProf()
defer p.Close()
db, err := NewDb(test_db)
assert.Nil(t, err)
sql := "select * from `order` limit 10"
ret, err := db.Fetch(sql)
assert.Nil(t, err)
fmt.Printf("%v \n", ret)
item := ret.Data[0]
for _, i := range item {
fmt.Printf("%v", i)
}
}

BIN
biz/db/men.prof Normal file

Binary file not shown.

292
biz/export/csv_exporter.go Normal file
View File

@ -0,0 +1,292 @@
package export
import (
"excel_export/biz/config"
"excel_export/pkg"
"fmt"
"math/big"
"sync"
"time"
)
// CsvExporter pulls rows via a DataFetcher and streams them to a
// FileAdapter as CSV.
type CsvExporter struct {
	mFetcher DataFetcher     // data source queried by Export
	file     FileAdapter     // output sink written by exportToCsv
	count    int             // size of the last fetched result set
	wg       *sync.WaitGroup // lets callers wait for the async write
}
// NewCsvExporter wires a fetcher and a file adapter into a DataExporter.
func NewCsvExporter(fetcher DataFetcher, file FileAdapter) DataExporter {
	exporter := &CsvExporter{
		mFetcher: fetcher,
		file:     file,
	}
	return exporter
}
// Fetcher replaces the data source used by subsequent Export calls.
func (ee *CsvExporter) Fetcher(fetcher DataFetcher) {
	ee.mFetcher = fetcher
}

// File replaces the output adapter used by subsequent Export calls.
func (ee *CsvExporter) File(file FileAdapter) {
	ee.file = file
}

// WaitGroup sets the group that Export/exportToCsv mark Done; callers must
// Add(1) before each Export call.
func (ee *CsvExporter) WaitGroup(wg *sync.WaitGroup) {
	ee.wg = wg
}
// Export runs the task query, then writes the rows to CSV on a background
// goroutine. The caller-supplied WaitGroup is decremented either by that
// goroutine or, when the query returned no rows, synchronously here — so
// callers must wg.Add(1) before each Export. extraData is forwarded
// untouched to the per-cell transformation.
// NOTE(review): on a fetch error this returns before wg.Done; the caller's
// Add(1) is then never matched — confirm callers handle that.
func (ee *CsvExporter) Export(sql string, t config.Task, extraData interface{}) error {
	begin := time.Now()
	data, err := ee.mFetcher.Fetch(sql)
	if err != nil {
		return fmt.Errorf("数据获取错误:%w", err)
	}
	duration := time.Now().Sub(begin)
	// Log slow fetches (>10s) for diagnosis.
	if duration.Seconds() > 10 {
		pkg.ProcessLog(fmt.Sprintf("数据获取耗时:%s \n", duration.String()))
	}
	ee.count = len(data.Data)
	if ee.count > 0 {
		// Write asynchronously; exportToCsv calls wg.Done when finished.
		go ee.exportToCsv(data, t.Excel, extraData)
	} else {
		ee.wg.Done()
	}
	return nil
}
// exportToCsv writes the fetched data set to the CSV adapter: a title row
// built from the excel column config, then every data row re-ordered into
// the configured columns and passed cell-by-cell through handleData.
// Runs on its own goroutine (see Export) and signals completion via wg.Done.
func (ee *CsvExporter) exportToCsv(data *Data, excel []map[string]string, extraData interface{}) {
	begin := time.Now()
	var (
		title []string
	)
	ee.file.Open()
	// keyMap: configured column name -> output position.
	keyMap := make(map[string]int)
	for k, v := range excel {
		title = append(title, v["name"])
		keyMap[v["column"]] = k
	}
	ee.file.WriteTitle(title)
	// titleReflect maps each source column index to its output position,
	// or -1 when the source column is not exported.
	titleReflect := make([]int, len(data.Title))
	for tk, tv := range data.Title {
		if _, ok := keyMap[tv]; !ok {
			titleReflect[tk] = -1
			continue
		}
		titleReflect[tk] = keyMap[tv]
	}
	for key, rows := range data.Data {
		row := make([]string, len(title))
		for rowKey, rowValue := range rows {
			if titleReflect[rowKey] != -1 {
				row[titleReflect[rowKey]] = rowValue
			}
		}
		// Per-cell display transformations (label mapping, masking, ...).
		for rk, rv := range row {
			row = ee.handleData(excel, keyMap, rk, rv, row, data.DataMap[key], extraData)
		}
		ee.file.Write(row)
	}
	ee.file.Close()
	ee.wg.Done()
	duration := time.Now().Sub(begin)
	// Log slow writes (>10s).
	if duration.Seconds() > 10 {
		pkg.ProcessLog(fmt.Sprintf("csv输出耗时%s \n", duration.String()))
	}
}
// Count reports how many rows the most recent Export fetched.
func (ee *CsvExporter) Count() int {
	return ee.count
}
// getPkIndex returns the position of the primary-key column pk within
// titles, or -1 when it is absent.
func (ee *CsvExporter) getPkIndex(titles []string, pk string) int {
	for index := range titles {
		if titles[index] == pk {
			return index
		}
	}
	return -1
}
// handleData applies per-column display transformations to one output row.
//
// excel[rk]["column"] names the logical column being rendered; value is its
// raw cell. Depending on the column, the raw value is mapped to a
// human-readable label (order type, pay status, voucher status, ...), masked
// ("key"), or cross-filled from the reseller relation passed in extraData.
// Some branches also mutate sibling cells of row (map_time /
// direct_reseller_name). The transformed value is written back into row[rk]
// and the row is returned.
//
// A raw value missing from a mapping table renders as "" (map zero value),
// matching the original behavior. The original's long if-chain is folded
// into one switch, its duplicated codebatch__reduce branch de-duplicated,
// and dead commented-out branches removed.
func (ee *CsvExporter) handleData(excel []map[string]string, keyMap map[string]int, rk int, value string, row []string, dataMap map[string]string, extraData interface{}) []string {
	key := excel[rk]["column"]
	switch key {
	case "key":
		// Base-convert the numeric key and mask its middle for display.
		converter := pkg.NewConverter()
		num := new(big.Int)
		num, success := num.SetString(value, 10)
		if !success {
			pkg.ProcessLog(fmt.Sprintf("数据处理失败:%s ", key))
		}
		value = pkg.HideStringMiddle(converter.EnBase(num, 16, 1), 4, 4)
	case "type":
		value = map[string]string{
			"1": "兑换码",
			"2": "立减金",
			"3": "兑换码现金红包",
		}[value]
	case "pay_type":
		value = map[string]string{
			"1": "支付宝",
			"5": "微信",
			"0": "",
		}[value]
	case "pay_status":
		value = map[string]string{
			"1": "待支付",
			"2": "已支付",
			"3": "已退款",
		}[value]
	case "status":
		// The status wording depends on the order type in the same row.
		var statusMap map[string]string
		switch row[keyMap["type"]] {
		case "1":
			statusMap = map[string]string{"0": "待充值", "1": "充值中", "2": "已完成", "3": "充值失败", "4": "已取消", "5": "已过期", "6": "待支付"}
		case "2":
			statusMap = map[string]string{"0": "待领取", "1": "待领取", "2": "已领取", "3": "领取失败", "4": "已取消", "5": "已过期", "6": "待支付"}
		default:
			statusMap = map[string]string{"0": "待领取", "1": "待领取", "2": "已核销", "3": "领取失败", "4": "已取消", "5": "已过期", "6": "待支付"}
		}
		value = statusMap[value]
	case "ordervoucher__status":
		value = map[string]string{
			"1": "待核销", "2": "已核销", "3": "已过期", "4": "已退款",
			"5": "领取失败", "6": "发放中", "7": "部分退款", "8": "已退回(全额退)", "9": "发放失败",
		}[value]
	case "use_coupon":
		value = map[string]string{
			"1": "使用",
			"2": "未使用",
		}[value]
	case "reseller_id":
		// Cross-fill direct-reseller columns from the relation loaded up
		// front; value becomes 是/否 depending on whether a direct reseller
		// is linked.
		resellerData := extraData.(map[string]map[string]string)
		if info, ok := resellerData[value]; ok {
			if info["direct_reseller_id"] != "0" {
				row[keyMap["map_time"]] = info["map_time"]
				row[keyMap["direct_reseller_name"]] = info["name"]
				value = "是"
			} else {
				value = "否"
			}
		}
	case "codebatch__reduce", "direct_reseller_id":
		// Orders that used no coupon show a zero deduction.
		// NOTE(review): zeroing direct_reseller_id here mirrors the original
		// code but looks like a copy-paste of the codebatch__reduce rule —
		// confirm intent.
		if row[keyMap["use_coupon"]] == "2" {
			value = "0.00"
		}
	case "orderCash__channel":
		value = map[string]string{
			"1": "支付宝",
			"2": "微信",
			"3": "云闪付",
		}[value]
	case "orderCash__receive_status":
		value = map[string]string{
			"0": "待领取",
			"1": "领取中",
			"2": "领取成功",
			"3": "领取失败",
		}[value]
	}
	row[rk] = value
	return row
}

127
biz/export/cvs.go Normal file
View File

@ -0,0 +1,127 @@
package export
import (
"bufio"
"encoding/csv"
"errors"
"fmt"
"io"
"os"
"reflect"
)
// Csv streams rows into a sequence of size-limited CSV files.
type Csv struct {
	fc     *os.File    // current file handle
	csv    *csv.Writer // writer over fc
	f      *File       // name template and slicing state
	isNew  bool        // true until the title row has been written to fc
	titles []string    // cached title row, re-written on each rollover
}
// NewCsv builds a CSV writer over a templated file name; param supplies the
// {placeholder} values. The per-file row limit is fixed at 100000000.
func NewCsv(fileName string, param map[string]string) *Csv {
	writer := &Csv{f: NewFile(fileName, 100000000, param)}
	return writer
}
// slice rolls over to the next file when the current one has reached its
// row limit.
func (e *Csv) slice() {
	if e.f.slice() {
		e.reset()
	}
}

// SetParam replaces the placeholder values used to render file names.
func (e *Csv) SetParam(param map[string]string) {
	e.f.param = param
}

// reset finishes the current file and opens the next one in the sequence,
// re-writing the cached title row. It calls slice again so a pre-existing
// next file that is already full rolls over immediately.
func (e *Csv) reset() {
	e.save()
	e.f.NextFile()
	e.Open()
	e.WriteTitle(nil)
	e.slice()
}
// Open prepares the underlying file for writing. An existing file is opened
// in append mode and its current line count is loaded so row-based slicing
// keeps working across runs; otherwise a new file is created and the title
// row is written on first WriteTitle.
func (e *Csv) Open() error {
	var err error
	if e.f.IsFileExist() {
		// O_RDWR (not the original O_WRONLY) so getLineCount can actually
		// read the file; O_APPEND keeps subsequent writes at the end
		// regardless of the read offset.
		e.fc, err = os.OpenFile(e.f.FileName(), os.O_APPEND|os.O_RDWR, 0644)
		if err != nil {
			return err
		}
		e.f.SetRow(e.getLineCount(e.fc))
	} else {
		e.isNew = true
		e.fc, err = os.Create(e.f.FileName())
		if err != nil {
			// Previously this error was dropped and Open returned nil with a
			// nil writer, causing a panic later in Write.
			return err
		}
	}
	e.csv = csv.NewWriter(e.fc)
	return nil
}
// getLineCount counts complete lines in file; continuation reads of one
// overlong line (isPrefix) are not double-counted.
func (e *Csv) getLineCount(file io.Reader) (line int) {
	reader := bufio.NewReader(file)
	for {
		_, isPrefix, err := reader.ReadLine()
		if err != nil {
			return line
		}
		if !isPrefix {
			line++
		}
	}
}
// save flushes buffered CSV data and closes the file, reporting any write
// or close failure (previously both errors were silently discarded).
func (e *Csv) save() error {
	e.csv.Flush()
	flushErr := e.csv.Error()
	closeErr := e.fc.Close()
	if flushErr != nil {
		return flushErr
	}
	return closeErr
}
// WriteTitle records the title row (when titles is non-nil) and writes it
// once to each freshly created file.
func (e *Csv) WriteTitle(titles []string) error {
	if titles != nil {
		e.titles = titles
	}
	if e.isNew && e.titles != nil {
		e.Write(e.titles)
		e.isNew = false
	}
	return nil
}
// Write appends one record, rolling the file over first when the row limit
// is reached. []string rows are written directly; []interface{} rows are
// stringified with %v. Non-slice data is rejected with an error, and
// unsupported slice element types now also report an error instead of being
// silently dropped (the original returned nil for them).
func (e *Csv) Write(data interface{}) error {
	if e.f.slice() {
		e.reset()
	}
	switch val := data.(type) {
	case []string:
		return e.csv.Write(val)
	case []interface{}:
		record := make([]string, len(val))
		for i, cell := range val {
			record[i] = fmt.Sprintf("%v", cell)
		}
		return e.csv.Write(record)
	}
	v := reflect.ValueOf(data)
	if v.Kind() == reflect.Ptr {
		v = v.Elem()
	}
	if v.Kind() != reflect.Slice {
		return errors.New("数据无效,不是切片类型")
	}
	// A slice of some other element type: surface it instead of dropping it.
	return fmt.Errorf("数据无效,不支持的切片类型:%T", data)
}
// Close flushes buffered data and closes the current file.
func (e *Csv) Close() error {
	return e.save()
}

46
biz/export/export.go Normal file
View File

@ -0,0 +1,46 @@
package export
import "excel_export/biz/config"
type (
	// DataExporter drives one export: configure a fetcher and a file sink,
	// run Export, then read Count for the number of fetched rows.
	DataExporter interface {
		Fetcher(fetcher DataFetcher)
		File(file FileAdapter)
		Export(sql string, t config.Task, extraData interface{}) error
		Count() int
	}
	// Data is a fully materialized query result: column titles, positional
	// rows, and the same rows keyed by column name.
	Data struct {
		Title   []string
		Data    [][]string
		DataMap []map[string]string
	}
	// DataFetcher runs a query and returns its materialized result.
	DataFetcher interface {
		Fetch(sql string) (*Data, error)
	}
	// FileAdapter is a row-oriented output sink (csv, xlsx, ...).
	FileAdapter interface {
		Open() error
		WriteTitle([]string) error
		Write(interface{}) error
		Close() error
	}
	// ResellerData groups the reseller tables and their merged relation.
	ResellerData struct {
		ResellerData       []Reseller
		DirectResellerData []DirectReseller
		Relation           []map[string]string
	}
	// Reseller is one reseller row.
	Reseller struct {
		Id               int
		Name             string
		DirectResellerId int
		MapTime          string
	}
	// DirectReseller is one direct-reseller row.
	DirectReseller struct {
		Id   int
		Name string
	}
)

88
biz/export/file.go Normal file
View File

@ -0,0 +1,88 @@
package export
import (
"os"
"regexp"
"strconv"
)
// FileOpts tracks the current file's name template, per-file row limit, and
// how many rows have been written so far.
type FileOpts struct {
	fileName string
	limit    int
	row      int
}

// slice counts one more row and reports whether the file is now over its
// limit (the title row is excluded via the +1 allowance); when it is, the
// counter resets for the next file.
func (f *FileOpts) slice() bool {
	f.row++
	if f.row <= f.limit+1 {
		return false
	}
	f.row = 0
	return true
}
// File adds {placeholder} rendering and an incrementing file index on top
// of FileOpts, producing the on-disk names of a sliced file sequence.
type File struct {
	FileOpts
	param map[string]string // placeholder values for the name template
	index int               // current slice index, inserted into the name
}

// NewFile builds a File for the given name template, per-file row limit and
// placeholder values.
func NewFile(name string, limit int, param map[string]string) *File {
	return &File{
		FileOpts: FileOpts{
			fileName: name,
			limit:    limit,
		},
		param: param,
	}
}
// SetIndex jumps to the given slice index.
func (f *File) SetIndex(index int) *File {
	f.index = index
	return f
}

// SetRow overrides the written-row counter (used when appending to an
// existing file).
func (f *File) SetRow(row int) *File {
	f.row = row
	return f
}

// NextFile advances to the next slice index.
func (f *File) NextFile() *File {
	f.index++
	return f
}
// FileName renders the template: {field} placeholders are replaced from
// param, then "_<index>" is inserted just before the first extension dot
// (e.g. a.xlsx -> a_0.xlsx).
func (f *File) FileName() string {
	placeholder := regexp.MustCompile("({[a-zA-Z0-9]+})")
	rendered := placeholder.ReplaceAllFunc([]byte(f.fileName), func(match []byte) []byte {
		if val, ok := f.param[string(match[1:len(match)-1])]; ok {
			return []byte(val)
		}
		return match
	})
	ext := regexp.MustCompile("(\\..*)")
	rendered = ext.ReplaceAllFunc(rendered, func(match []byte) []byte {
		suffix := []byte("_" + strconv.Itoa(f.index))
		return append(suffix, match...)
	})
	return string(rendered)
}
// IsFileExist reports whether the file for the current index already
// exists. Any stat error (including permission problems) is treated as
// "does not exist", matching the original behavior — the original's
// explicit os.IsNotExist branch and final return were both `false`, so the
// check collapses to err == nil.
func (f *File) IsFileExist() bool {
	_, err := os.Stat(f.FileName())
	return err == nil
}

39
biz/export/file_test.go Normal file
View File

@ -0,0 +1,39 @@
package export
import (
"github.com/stretchr/testify/assert"
"os"
"testing"
)
func TestFile_FileName(t *testing.T) {
f := NewFile("/usr/file-{begin}-{end}.xlsx", 10, map[string]string{
"begin": "20230404",
"end": "20230404",
})
assert.Equal(t, "/usr/file-20230404-20230404_0.xlsx", f.FileName())
f.NextFile()
assert.Equal(t, "/usr/file-20230404-20230404_1.xlsx", f.FileName())
}
func TestFile_IsFileExist(t *testing.T) {
gwd, _ := os.Getwd()
f := NewFile(gwd+"/file-{begin}-{end}.xlsx", 10, map[string]string{
"begin": "20230404",
"end": "20230404",
})
assert.False(t, f.IsFileExist())
path := gwd + "/file-20230404-20230404_0.xlsx"
_, err := os.Create(path)
assert.Nil(t, err)
assert.FileExists(t, path)
assert.True(t, f.IsFileExist())
err = os.Remove(path)
assert.Nil(t, err)
}

54
biz/util/ppro.go Normal file
View File

@ -0,0 +1,54 @@
package util
import (
"log"
"os"
"runtime/pprof"
"runtime/trace"
)
// Pprof bundles the cpu, heap ("men.prof") and trace output files.
type Pprof struct {
	fc *os.File // cpu profile
	fm *os.File // heap profile
	ft *os.File // execution trace
}
// NewProf creates cpu/heap/trace profile files in the working directory and
// immediately starts profiling; pair with Close (typically via defer).
// Any file-creation failure aborts the process via log.Fatal.
func NewProf() *Pprof {
	fc, err := os.Create("./cpu.prof")
	if err != nil {
		log.Fatal(err)
	}
	fm, err := os.Create("./men.prof")
	if err != nil {
		log.Fatal(err)
	}
	ft, err := os.Create("./trace.out")
	if err != nil {
		log.Fatal(err)
	}
	p := &Pprof{
		fc: fc,
		fm: fm,
		ft: ft,
	}
	p.Start()
	return p
}
// Start begins execution tracing and CPU profiling.
// NOTE(review): both calls can return errors (e.g. profiling already
// active) that are silently dropped here.
func (p *Pprof) Start() {
	trace.Start(p.ft)
	pprof.StartCPUProfile(p.fc)
}
// Close writes the heap profile, stops CPU profiling and tracing, and
// closes all three profile files.
func (p *Pprof) Close() {
	pprof.WriteHeapProfile(p.fm)
	pprof.StopCPUProfile()
	trace.Stop()
	p.fc.Close()
	p.fm.Close()
	p.ft.Close()
}

262
cmd/cmd/csv.go Executable file
View File

@ -0,0 +1,262 @@
package cmd
import (
"archive/zip"
"excel_export/biz/config"
"excel_export/biz/db"
"excel_export/biz/export"
"excel_export/pkg"
"fmt"
"io"
"os"
"path/filepath"
"strconv"
"sync"
"time"
)
// Csv drives one export run: it slices the requested time range, exports
// each slice to temporary csv files, merges them into xlsx and zips the
// result, reporting progress via pkg.MissionLog.
type Csv struct {
	conf       *config.Config
	dirTemp    string // lazily created temp dir holding the csv slices
	SysName    string
	JobName    string
	AdminId    string
	TaskIdStr  string
	Name       string // generated output/zip base name
	CreateTime string
}

// NewCsv builds an exporter bound to the loaded configuration.
func NewCsv(conf *config.Config) *Csv {
	return &Csv{
		conf: conf,
	}
}
// ExportMarket runs the named job of the named system over [begin, end],
// splitting output files every slice rows. condition extends the job's base
// SQL; adminId/taskIdStr identify the run in the mission log. A 0%-progress
// log entry is written before the actual export starts.
func (e *Csv) ExportMarket(adminId string, sysName string, jobName string, begin time.Time, end time.Time, slice int, condition [][3]interface{}, taskIdStr string) error {
	job, dbStr, err := config.GetJob(e.conf, sysName, jobName)
	if err != nil {
		return err
	}
	d, err := db.NewDb(dbStr)
	if err != nil {
		return err
	}
	e.SysName = sysName
	e.JobName = jobName
	e.TaskIdStr = taskIdStr
	e.AdminId = adminId
	// Output name: excel prefix + requested range + generation timestamp.
	e.Name = fmt.Sprintf("%s%s--%s %s", job.Excel, begin.Format("20060102150405"), end.Format("20060102150405"), time.Now().Format("20060102150405"))
	e.CreateTime = time.Now().Format(time.DateTime)
	err = pkg.MissionLog(adminId, sysName, jobName, taskIdStr, e.Name, 0, e.CreateTime)
	if err != nil {
		return err
	}
	sql := config.GetBaseSql(&job, condition)
	return e.JobHandler(job, d, map[string]interface{}{
		"begin": begin,
		"end":   end,
		"slice": slice,
		"sql":   sql,
	})
}
// JobHandler exports every task of the job in order; params carries the
// shared begin/end/slice/sql values and gains a "task" index per task.
func (e *Csv) JobHandler(job config.Job, d export.DataFetcher, params map[string]interface{}) error {
	for idx := range job.Tasks {
		params["task"] = idx
		if err := e.TaskExport(d, job.Tasks[idx], params, job.GetFileName(params)); err != nil {
			return err
		}
	}
	return nil
}
// TaskExport exports one task: the [begin, end] range is walked in 2-hour
// windows, each window's rows are written asynchronously to a temporary csv
// slice, and the slices are finally merged to xlsx and zipped. Progress is
// logged per window via pkg.MissionLog.
func (e *Csv) TaskExport(d export.DataFetcher, t config.Task, params map[string]interface{}, fileName string) error {
	var i int
	var wg sync.WaitGroup
	var total int
	beginTime := params["begin"].(time.Time)
	lastTime := params["end"].(time.Time)
	slice := params["slice"].(int)
	sql := params["sql"].(string)
	// Reseller relation, forwarded to the exporter for cross-filled columns.
	reseller, err := db.ResellerData(e.conf)
	if err != nil {
		return err
	}
	// 2h windows minus 1s so consecutive windows do not overlap.
	interval := 2*time.Hour - (1 * time.Second)
	sliceTimeTotal := lastTime.Sub(beginTime).Hours()/2 + 1
	over := false
	for i = 0; i < 10000; i++ { // hard cap on windows as a safety net
		endTime := beginTime.Add(interval)
		// Clamp the final window to the requested end time.
		if endTime.After(lastTime) {
			endTime = lastTime
			over = true
		}
		f, err := e.getCsvFile(i)
		if err != nil {
			return err
		}
		params["begin"] = beginTime
		params["end"] = endTime
		params["slice"] = slice
		params["sql"] = sql
		// Shadows the outer sql on purpose: the per-window rendered query.
		sql := t.GetSql(params)
		ee := export.NewCsvExporter(d, f)
		ee.(*export.CsvExporter).WaitGroup(&wg)
		wg.Add(1)
		// NOTE(review): the Export error is ignored; on a fetch failure
		// Export returns before wg.Done, so wg.Wait below could block
		// forever — confirm intended.
		ee.Export(sql, t, reseller)
		count := ee.Count()
		//fmt.Printf("已导出 %d 条数据\n", batch*i+count)
		total = total + count
		process := int((float64(i) / float64(sliceTimeTotal)) * 100)
		pkg.MissionLog(e.AdminId, e.SysName, e.JobName, e.TaskIdStr, e.Name, process, e.CreateTime)
		if count == 0 {
			// Reuse the same csv index when a window produced no rows.
			i = i - 1
		}
		if over {
			break
		}
		beginTime = endTime
		time.Sleep(time.Microsecond * 30)
	}
	wg.Wait()
	//fmt.Println("tempDir", e.dirTemp)
	if total > 0 { // data was found
		// Merge the csv slices into xlsx files.
		if err := e.mergeCsvToExcel(e.dirTemp, i, fileName, slice); err != nil {
			pkg.ErrLog(fmt.Sprintf("合并csv文件异常%s", err.Error()))
			return err
		}
		// Zip the xlsx output.
		if err := e.FolderToZip(e.OutFilePath("excel"), e.ZipFile()); err != nil {
			pkg.ErrLog(fmt.Sprintf("合并csv文件异常%s", err.Error()))
			return err
		}
		pkg.MissionLog(e.AdminId, e.SysName, e.JobName, e.TaskIdStr, e.Name, 100, e.CreateTime)
	}
	// Reset the temp path for the next task.
	e.dirTemp = ""
	return nil
}
// getCsvFile returns a CSV writer for the given slice index inside a lazily
// created temp directory shared by the whole task.
func (e *Csv) getCsvFile(index int) (*export.Csv, error) {
	if e.dirTemp == "" {
		tempDir, err := os.MkdirTemp(os.TempDir(), "")
		if err != nil {
			return nil, err
		}
		e.dirTemp = tempDir
	}
	writer := export.NewCsv(e.dirTemp+"/data_{index}.csv", map[string]string{
		"index": strconv.Itoa(index),
	})
	return writer, nil
}
// mergeCsvToExcel merges the temporary csv slices 0..max under path into
// one or more xlsx files (at most slice rows each) named after out.
func (e *Csv) mergeCsvToExcel(path string, max int, out string, slice int) error {
	m := NewMerge(
		Reader{Path: path, Index: max},
		Writer{File: e.OutFileName(out), Limit: slice},
	)
	if err := m.Merge(); err != nil {
		return err
	}
	// Deleting the temp directory is intentionally disabled for now.
	//return m.ClearCsv()
	return nil
}
// OutFileName places out inside the task's excel output directory.
func (e *Csv) OutFileName(out string) string {
	return fmt.Sprintf("%s/%s", e.OutFilePath("excel"), out)
}
// OutFilePath returns (and ensures the existence of) the per-task output
// directory for the given file type ("excel", "zip", ...).
func (e *Csv) OutFilePath(fileType string) string {
	wd, _ := os.Getwd()
	dir := fmt.Sprintf("%s/%s/%s", wd, fileType, e.TaskIdStr)
	// Best-effort create; errors surface later when the files are written.
	_ = pkg.CheckDir(dir)
	// Reuse the already-built path instead of formatting it a second time.
	return dir
}
// ZipFile returns the full path of the task's zip archive.
func (e *Csv) ZipFile() string {
	return fmt.Sprintf("%s/%s%s", e.OutFilePath("zip"), e.Name, ".zip")
}
// FolderToZip packs every regular file under folderPath into a zip archive
// at zipFilePath (entry names are relative to folderPath, slash-separated)
// and then removes the source folder.
func (e *Csv) FolderToZip(folderPath, zipFilePath string) error {
	// Create the archive file.
	zipFile, err := os.Create(zipFilePath)
	if err != nil {
		return err
	}
	defer zipFile.Close()
	archive := zip.NewWriter(zipFile)
	// Walk the folder, streaming each file into the archive.
	err = filepath.Walk(folderPath, func(path string, info os.FileInfo, err error) error {
		if err != nil {
			return err
		}
		// Directories get no zip entry of their own.
		if info.IsDir() {
			return nil
		}
		// Open the source file; the defer fires when this closure returns,
		// so handles are released per file, not at the end of the walk.
		file, err := os.Open(path)
		if err != nil {
			return err
		}
		defer file.Close()
		header, err := zip.FileInfoHeader(info)
		if err != nil {
			return err
		}
		// Store the path relative to the folder root.
		header.Name = filepath.ToSlash(path[len(folderPath):])
		writer, err := archive.CreateHeader(header)
		if err != nil {
			return err
		}
		_, err = io.Copy(writer, file)
		return err
	})
	if err != nil {
		archive.Close()
		return err
	}
	// Close the writer explicitly: the central directory is written here,
	// and the original's deferred Close silently discarded its error,
	// potentially leaving a corrupt archive undetected.
	if err := archive.Close(); err != nil {
		return err
	}
	return e.xlsxClear(folderPath)
}
// xlsxClear removes the intermediate output folder once its contents have
// been packed into the zip archive.
func (e *Csv) xlsxClear(folderPath string) error {
	return os.RemoveAll(folderPath)
}

24
cmd/cmd/csv_test.go Executable file
View File

@ -0,0 +1,24 @@
package cmd
import (
"fmt"
"github.com/stretchr/testify/assert"
"os"
"testing"
)
// TestCsv_getCsvFile verifies that getCsvFile lazily creates (and caches)
// a temp working directory and returns a usable csv handle.
func TestCsv_getCsvFile(t *testing.T) {
	c := &Csv{}
	f, err := c.getCsvFile(1)
	assert.Nil(t, err)
	assert.NotNil(t, f)
	f.Open()
	f.Close()
	fmt.Println(c.dirTemp)
	// The temp dir must exist after first use and be removable afterwards.
	assert.DirExists(t, c.dirTemp)
	os.RemoveAll(c.dirTemp)
	assert.NoDirExists(t, c.dirTemp)
}

36
cmd/cmd/job.go Executable file
View File

@ -0,0 +1,36 @@
package cmd
import (
"excel_export/biz/config"
"github.com/spf13/cobra"
)
func init() {
	// --system / -s selects which configured system's jobs to list.
	jobCmd.Flags().StringP("system", "s", "", "系统名称")
	rootCmd.AddCommand(jobCmd)
}

// jobCmd lists the jobs configured for one system.
// NOTE(review): Short reads "支持的系统" (supported systems), identical to
// systemCmd's — it likely should say "supported jobs"; confirm.
var jobCmd = &cobra.Command{
	Use:          "job",
	Short:        "支持的系统",
	SilenceUsage: true,
	SilenceErrors: true,
	Run:          jobRun,
}
// jobRun prints every job supported by the system named via --system.
func jobRun(cmd *cobra.Command, args []string) {
	sName := cmd.Flag("system").Value.String()
	sys, err := config.DefaultConfig.GetSystem(sName)
	if err != nil {
		// CmdError terminates the process via os.Exit(1).
		CmdError(cmd, "%s\n请输入export-tool system进行查看", err.Error())
	}
	cmd.Println("支持的任务:")
	for _, job := range sys.Jobs {
		cmd.Printf("%s \n", job.Name)
	}
}

193
cmd/cmd/merge.go Executable file
View File

@ -0,0 +1,193 @@
package cmd
import (
"encoding/csv"
"excel_export/pkg"
"fmt"
"github.com/xuri/excelize/v2"
"io"
"log"
"math"
"os"
"regexp"
"strconv"
"time"
)
type (
	// Reader describes the csv shard input: the directory holding the
	// shards and the highest shard index to look for.
	Reader struct {
		Path  string
		Index int
	}
	// Writer describes the xlsx output: the base file name and the row
	// limit per output file.
	Writer struct {
		File  string
		Limit int
	}
	// Merge streams csv shards into one or more xlsx workbooks.
	Merge struct {
		reader    Reader
		writer    Writer
		file      *excelize.File         // workbook currently being written
		sw        *excelize.StreamWriter // stream writer for Sheet1 of file
		titles    []interface{}          // header row, replayed into every output file
		fileIndex int                    // index of the current output file
		total     int                    // rows written across all files
		rowIndex  int                    // rows written into the current file
	}
)
// NewMerge builds a Merge over reader r and writer w and eagerly opens the
// first output workbook.
// NOTE(review): the error from m.open() is discarded here; a failed
// NewStreamWriter would surface later as a nil stream-writer use — confirm.
func NewMerge(r Reader, w Writer) *Merge {
	m := &Merge{
		reader: r,
		writer: w,
	}
	m.open()
	return m
}
// Merge walks the numbered csv shards (data_<i>_0.csv under reader.Path)
// and streams their rows into the xlsx output, rolling over to a new
// workbook when the per-file row limit is reached. Missing shard files
// are skipped silently.
// Fixes over the original: the shard file is now closed on error paths
// (it leaked on csv read errors), row-write errors are propagated instead
// of ignored, and the misspelled local `frist` is gone.
func (m *Merge) Merge() error {
	begin := time.Now()
	defer func() {
		pkg.ProcessLog(fmt.Sprintf("mergeCsvToExcel:耗时 %s\n", time.Since(begin).String()))
		if err := m.Save(); err != nil {
			log.Println(err)
		}
	}()
	for i := 0; i <= m.reader.Index; i++ {
		filename := fmt.Sprintf("%s/data_%d_0.csv", m.reader.Path, i)
		csvOpen, err := os.Open(filename)
		if err != nil {
			if os.IsNotExist(err) {
				continue
			}
			return fmt.Errorf("打开读取文件%s失败%w", filename, err)
		}
		err = m.mergeFile(csvOpen, filename, i)
		csvOpen.Close()
		if err != nil {
			return err
		}
	}
	return nil
}

// mergeFile copies one shard into the stream writer. The first record of
// every shard is a header row: it becomes the title for the first shard
// (i == 0) and is skipped for all later shards.
func (m *Merge) mergeFile(f io.Reader, filename string, i int) error {
	csvReader := csv.NewReader(f)
	first := true
	for {
		record, err := csvReader.Read()
		if err == io.EOF {
			return nil
		} else if err != nil {
			return fmt.Errorf("读取文件%s错误%w", filename, err)
		}
		row := transform(record)
		if first {
			first = false
			if i == 0 {
				if err := m.WriteTitle(row); err != nil {
					return err
				}
			}
			continue
		}
		if err := m.Write(row); err != nil {
			return err
		}
	}
}
// WriteTitle records titles (when non-nil) as the remembered header row
// and writes the remembered header into the current sheet, if one exists.
func (m *Merge) WriteTitle(titles []interface{}) error {
	if titles != nil {
		m.titles = titles
	}
	if m.titles == nil {
		return nil
	}
	return m.Write(m.titles)
}
// Write appends one row at the next free row of the current sheet and
// advances the row counters.
func (m *Merge) Write(values []interface{}) error {
	cell, err := excelize.CoordinatesToCellName(1, m.rowIndex+1)
	if err == nil {
		err = m.sw.SetRow(cell, values)
	}
	if err != nil {
		return err
	}
	m.count()
	return nil
}
// reset finalizes the current workbook (if any), bumps the output file
// index and opens a fresh workbook starting at row zero.
func (m *Merge) reset() error {
	if m.file != nil {
		if err := m.Save(); err != nil {
			return err
		}
	}
	m.fileIndex++
	m.rowIndex = 0
	return m.open()
}
// count tracks totals and rolls the output over to a new workbook once
// the configured per-file row limit is exceeded.
func (m *Merge) count() {
	m.total++
	m.rowIndex++
	if m.rowIndex > m.writer.GetLimit() {
		// A failed rollover was previously ignored entirely; at least
		// log it so lost output files are diagnosable.
		if err := m.reset(); err != nil {
			log.Println(err)
		}
	}
}
// open creates a fresh workbook and stream writer, replaying the header
// row so every output file starts with the titles.
// Fix: the original called WriteTitle even when NewStreamWriter failed,
// dereferencing a nil stream writer; it also discarded WriteTitle's error.
func (m *Merge) open() (err error) {
	m.file = excelize.NewFile()
	if m.sw, err = m.file.NewStreamWriter("Sheet1"); err != nil {
		return err
	}
	return m.WriteTitle(nil)
}
// Save flushes the stream writer and persists the workbook, except when
// the sheet holds nothing but the header row.
func (m *Merge) Save() error {
	// Skip output files that contain only the title row.
	if m.titles != nil && m.rowIndex == 1 {
		return nil
	}
	err := m.sw.Flush()
	if err == nil {
		err = m.file.SaveAs(m.writer.GetFileName(m.fileIndex))
	}
	return err
}
// ClearCsv deletes the temporary csv shard directory.
func (m *Merge) ClearCsv() error {
	return os.RemoveAll(m.reader.Path)
}
// fileIndexPattern captures everything from the first dot onward (the
// extension chain) so the file index can be inserted in front of it.
var fileIndexPattern = regexp.MustCompile(`(\..*)`)

// GetFileName derives the name of the fileIndex-th output file by
// inserting "_<fileIndex>" before the extension of the configured base
// name (e.g. "out.xlsx" -> "out_2.xlsx").
// The regexp is now compiled once at package scope instead of on every
// rollover, and the manual byte-splicing is replaced by ReplaceAllString.
func (w *Writer) GetFileName(fileIndex int) string {
	return fileIndexPattern.ReplaceAllString(w.File, "_"+strconv.Itoa(fileIndex)+"${1}")
}
// GetLimit caps the per-file row limit at the 1,000,000-row ceiling used
// for a single sheet (the configured limit wins when smaller). The
// original round-tripped the ints through float64 via math.Min.
func (w *Writer) GetLimit() int {
	const sheetMaxRows = 1000000
	if w.Limit < sheetMaxRows {
		return w.Limit
	}
	return sheetMaxRows
}
// transform converts one csv record into the []interface{} row shape the
// excelize stream writer expects.
func transform(record []string) []interface{} {
	result := make([]interface{}, 0, len(record))
	for _, field := range record {
		result = append(result, field)
	}
	return result
}

29
cmd/cmd/merge_test.go Executable file
View File

@ -0,0 +1,29 @@
package cmd
import (
"github.com/stretchr/testify/assert"
"os"
"testing"
)
// TestMerge_Write writes four data rows with a limit of 2, exercising the
// rollover path (multiple output files) plus Save.
func TestMerge_Write(t *testing.T) {
	m := NewMerge(Reader{}, Writer{"xx.xlsx", 2})
	m.WriteTitle([]interface{}{"姓名", "年龄"})
	m.Write([]interface{}{"张三", 12})
	m.Write([]interface{}{"李四", 14})
	m.Write([]interface{}{"王五", 15})
	m.Write([]interface{}{"王五", 15})
	m.Save()
}

// TestMerge_Save merges pre-existing csv shards into an xlsx file.
// NOTE(review): both the temp-dir name and the absolute output path are
// machine-specific — this test only passes on the machine it was written on.
func TestMerge_Save(t *testing.T) {
	m := NewMerge(
		Reader{Path: os.TempDir() + "/370245221", Index: 5},
		Writer{File: "/var/gop/excel-export/excel/营销系统-订单列表-20240415-20240415-0.xlsx", Limit: 10000},
	)
	err := m.Merge()
	assert.NoError(t, err)
}

16
cmd/cmd/root.go Executable file
View File

@ -0,0 +1,16 @@
package cmd
import (
"github.com/spf13/cobra"
)
// rootCmd is the top-level export-tool command; subcommands (job, system)
// attach themselves in their init functions.
var rootCmd = &cobra.Command{
	Use:          "export-tool",
	Short:        "导出直充系统订单数据",
	SilenceUsage: true,
	SilenceErrors: true,
}

// Execute runs the CLI and exits non-zero on error (via cobra.CheckErr).
func Execute() {
	cobra.CheckErr(rootCmd.Execute())
}

9
cmd/cmd/runtime/err/2024-04-16 Executable file
View File

@ -0,0 +1,9 @@
【2024-04-16 11:38:35】mergeCsvToExcel:耗时 18.764µs
【2024-04-16 11:39:33】mergeCsvToExcel:耗时 16.157µs
【2024-04-16 11:41:58】mergeCsvToExcel:耗时 1.37093709s

View File

@ -0,0 +1,12 @@
【2024-04-16 11:47:34】mergeCsvToExcel:耗时 2m4.84858093s
【2024-04-16 11:55:36】mergeCsvToExcel:耗时 4m43.292729127s
【2024-04-16 11:57:15】mergeCsvToExcel:耗时 3.610311367s
【2024-04-16 12:00:08】mergeCsvToExcel:耗时 1.354335132s

View File

@ -0,0 +1,9 @@
【2024-04-17 13:45:04】mergeCsvToExcel:耗时 16.956095ms
【2024-04-17 13:51:15】mergeCsvToExcel:耗时 10.010482ms
【2024-04-17 13:51:57】mergeCsvToExcel:耗时 8.119687ms

View File

@ -0,0 +1,6 @@
【2024-04-18 21:12:06】mergeCsvToExcel:耗时 32.742096ms
【2024-04-18 21:33:04】mergeCsvToExcel:耗时 11.730204769s

BIN
cmd/cmd/sss_0.xlsx Executable file

Binary file not shown.

29
cmd/cmd/system.go Executable file
View File

@ -0,0 +1,29 @@
package cmd
import (
"excel_export/biz/config"
"github.com/spf13/cobra"
)
func init() {
	rootCmd.AddCommand(systemCmd)
}

// systemCmd lists every system name present in the loaded configuration.
var systemCmd = &cobra.Command{
	Use:          "system",
	Short:        "支持的系统",
	SilenceUsage: true,
	SilenceErrors: true,
	Run:          systemRun,
}
// systemRun prints the name of every system the loaded config supports.
func systemRun(cmd *cobra.Command, args []string) {
	cmd.Println("支持的系统:")
	for _, system := range config.DefaultConfig.Systems {
		cmd.Printf("%s\n", system.Name)
	}
}

37
cmd/cmd/uitls.go Executable file
View File

@ -0,0 +1,37 @@
package cmd
import (
"fmt"
"github.com/spf13/cobra"
flag "github.com/spf13/pflag"
"os"
"time"
)
// MustFlagsDateTime parses flag `key` as a datetime and terminates the
// process (via cobra.CheckErr) when the value is missing or malformed.
func MustFlagsDateTime(cmd *cobra.Command, key string) time.Time {
	t, err := ParseFlagsDateTime(cmd.Flags(), key)
	cobra.CheckErr(err)
	return *t
}
// ParseFlagsDateTime reads string flag `key` from set and parses it as a
// local-timezone "2006-01-02 15:04:05" timestamp.
func ParseFlagsDateTime(set *flag.FlagSet, key string) (*time.Time, error) {
	val, err := set.GetString(key)
	if err != nil {
		return nil, fmt.Errorf("获取参数异常:%w", err)
	}
	parsed, err := time.ParseInLocation("2006-01-02 15:04:05", val, time.Local)
	if err != nil {
		return nil, fmt.Errorf("不是有效的时间格式:%w", err)
	}
	return &parsed, nil
}
// CmdOutput writes a formatted message to the command's stdout writer.
func CmdOutput(cmd *cobra.Command, format string, opts ...interface{}) {
	fmt.Fprintf(cmd.OutOrStdout(), format, opts...)
}

// CmdError writes a formatted message to the command's stderr writer and
// terminates the process with exit code 1 — it never returns.
func CmdError(cmd *cobra.Command, format string, opts ...interface{}) {
	fmt.Fprintf(cmd.OutOrStderr(), format, opts...)
	os.Exit(1)
}

18
cmd/main.go Executable file
View File

@ -0,0 +1,18 @@
package main
import (
"excel_export/router"
"fmt"
"github.com/gin-gonic/gin"
)
// main wires the export routes and serves the API on localhost:3030.
func main() {
	r := gin.Default()
	router.App(r)
	if err := r.Run("127.0.0.1:3030"); err != nil {
		// Fix: fmt.Println does not interpret format verbs, so the original
		// printed a literal "%w"; Printf with %v renders the actual error.
		fmt.Printf("开启服务失败:%v\n", err)
	}
}

BIN
cmd/server Executable file

Binary file not shown.

BIN
cmd/server2 Executable file

Binary file not shown.

21
go.mod Normal file
View File

@ -0,0 +1,21 @@
module excel_export
go 1.16
require (
github.com/flytam/filenamify v1.1.2 // direct
github.com/spf13/cobra v1.6.1 // direct
github.com/xuri/excelize/v2 v2.7.1 // direct
gorm.io/driver/mysql v1.4.5 // direct
gorm.io/gorm v1.24.3 // direct
)
require (
github.com/spf13/pflag v1.0.5
github.com/stretchr/testify v1.8.3
)
require (
github.com/gin-gonic/gin v1.9.1
github.com/spf13/viper v1.15.0
)

1407
go.sum Normal file

File diff suppressed because it is too large Load Diff

8
impl/form.go Normal file
View File

@ -0,0 +1,8 @@
package impl
// OrderJsonQuery is the JSON request body for starting an export task.
type OrderJsonQuery struct {
	// Inclusive time window, "2006-01-02 15:04:05" format; both required.
	BeginTime string `form:"begin_time" json:"begin_time" binding:"required" msg:"begin_time缺失"`
	EndTime   string `form:"end_time" json:"end_time" binding:"required" msg:"end_time缺失"`
	// Optional filter triples; element semantics are defined by the
	// consumer (ExportMarket) — presumably (field, operator, value).
	Condition [][3]interface{} `form:"condition" json:"condition" `
	// Max rows per output file; required and must be non-zero.
	Slice int `form:"slice" json:"slice" binding:"required" msg:"slice缺失"`
}

23
pkg/e/code.go Normal file
View File

@ -0,0 +1,23 @@
package e
// Business status codes carried in the "code" field of API responses.
// NOTE(review): UNKNOWN_ERROT is a typo for UNKNOWN_ERROR but is referenced
// from other files, so it is preserved here; MISSION_NOT_FOUND (7003) also
// breaks the five-digit convention of its neighbours — confirm intended.
const (
	SUCCESS        = 200
	ERROR          = 500
	INVALID_PARAMS = 400

	ERROR_EXIST_TAG         = 10001
	ERROR_NOT_EXIST_TAG     = 10002
	ERROR_NOT_EXIST_ARTICLE = 10003

	ERROR_AUTH_CHECK_TOKEN_FAIL    = 20001
	ERROR_AUTH_CHECK_TOKEN_TIMEOUT = 20002
	ERROR_AUTH_TOKEN               = 20003
	ERROR_AUTH                     = 20004

	CREATE_CONFIG_FAIL = 50001
	GET_CONFIG_FAIL    = 50002

	UNKNOWN_ERROT      = 60001
	UPDATE_CONFIG_FAIL = 60002

	MISSION_NOT_FOUND = 7003
)

27
pkg/e/msg.go Normal file
View File

@ -0,0 +1,27 @@
package e
// MsgFlags maps business status codes to their human-readable messages.
// NOTE(review): ERROR_AUTH (20004) has no entry here and therefore falls
// back to the generic ERROR message via GetMsg — confirm intentional.
var MsgFlags = map[int]string{
	SUCCESS:                        "ok",
	ERROR:                          "fail",
	INVALID_PARAMS:                 "请求参数错误",
	ERROR_EXIST_TAG:                "已存在该标签名称",
	ERROR_NOT_EXIST_TAG:            "该标签不存在",
	ERROR_NOT_EXIST_ARTICLE:        "该文章不存在",
	ERROR_AUTH_CHECK_TOKEN_FAIL:    "Token鉴权失败",
	ERROR_AUTH_CHECK_TOKEN_TIMEOUT: "Token已超时",
	ERROR_AUTH_TOKEN:               "Token生成失败",
	CREATE_CONFIG_FAIL:             "生成配置文件失败",
	UNKNOWN_ERROT:                  "未知错误",
	GET_CONFIG_FAIL:                "获取配置文件失败",
	UPDATE_CONFIG_FAIL:             "修改配置文件失败",
	MISSION_NOT_FOUND:              "进度文件未找到",
}
// GetMsg resolves a business code to its message, falling back to the
// generic ERROR text for unknown codes.
func GetMsg(code int) string {
	if msg, ok := MsgFlags[code]; ok {
		return msg
	}
	return MsgFlags[ERROR]
}

123
pkg/func.go Normal file
View File

@ -0,0 +1,123 @@
package pkg
import (
"encoding/json"
"excel_export/biz/config"
"fmt"
"os"
"sort"
"strings"
"time"
)
// GetTaskId issues a task identifier from the current nanosecond unix
// timestamp. Uniqueness relies on successive calls landing in different
// nanoseconds; concurrent callers hitting the same nanosecond collide.
func GetTaskId() string {
	return fmt.Sprintf("%d", time.Now().UnixNano())
}
// HideStringMiddle masks the middle of value with '*', keeping the first
// `front` and last `bak` characters visible; values too short to mask are
// returned unchanged.
// Fix: the original sliced raw bytes, so multi-byte characters (e.g.
// Chinese names, the main data in this system) could be cut mid-rune and
// the mask width miscounted. This version operates on runes; pure-ASCII
// inputs behave exactly as before.
func HideStringMiddle(value string, front int, bak int) string {
	runes := []rune(value)
	if len(runes) < front+bak {
		return value
	}
	middle := strings.Repeat("*", len(runes)-(front+bak))
	return string(runes[:front]) + middle + string(runes[len(runes)-bak:])
}
// MergeMaps returns a fresh map holding every pair of m1 overlaid with
// every pair of m2; on key collisions m2 wins. Neither input is modified.
func MergeMaps(m1, m2 map[string]string) map[string]string {
	merged := make(map[string]string, len(m1)+len(m2))
	// Copy in order so m2's values overwrite m1's on shared keys.
	for _, src := range []map[string]string{m1, m2} {
		for k, v := range src {
			merged[k] = v
		}
	}
	return merged
}
// SortFileWithModTime lists the entries of dir sorted newest-first by
// modification time. An unreadable directory yields a nil slice.
// Fix: the original discarded the os.Open/ReadDir errors and went on to
// call methods on a possibly-nil *os.File.
func SortFileWithModTime(dir string) []config.FileInfoWithModTime {
	d, err := os.Open(dir)
	if err != nil {
		fmt.Println("Error opening dir:", err)
		return nil
	}
	defer d.Close()
	files, err := d.ReadDir(0)
	if err != nil {
		fmt.Println("Error reading dir:", err)
		return nil
	}
	var fileInfoList []config.FileInfoWithModTime
	for _, file := range files {
		fileInfo, err := file.Info()
		if err != nil {
			fmt.Println("Error getting file info:", err)
			continue
		}
		fileInfoList = append(fileInfoList, config.FileInfoWithModTime{FileInfo: fileInfo, ModTime: fileInfo.ModTime()})
	}
	// Newest first.
	sort.Slice(fileInfoList, func(i, j int) bool {
		return fileInfoList[i].ModTime.After(fileInfoList[j].ModTime)
	})
	return fileInfoList
}
// OrderExportData is the json record persisted per task by MissionLog:
// output file name, completion percentage, status (1 when done, else 0)
// and the task's timestamp string.
type OrderExportData struct {
	FileName string `json:"file_name"`
	Process  int    `json:"process"`
	Status   int8   `json:"status"`
	Time     string `json:"time"`
}
// SortFileWithStatus reads each task file in dir, decodes its status json
// and returns the entries sorted newest-first by the recorded time.
// Fix: os.Open/ReadDir/Info errors were discarded; entries whose Info()
// failed were appended with a nil FileInfo and pushed the nil-check onto
// every consumer. Per-file read/parse stays best-effort: a corrupt status
// file leaves Status/Time at their zero values, as before.
func SortFileWithStatus(dir string) []config.FileInfoStatus {
	d, err := os.Open(dir)
	if err != nil {
		return nil
	}
	defer d.Close()
	files, err := d.ReadDir(0)
	if err != nil {
		return nil
	}
	var fileInfoList []config.FileInfoStatus
	for _, file := range files {
		fileInfo, err := file.Info()
		if err != nil {
			continue
		}
		bytes, _ := os.ReadFile(dir + "/" + file.Name())
		var info OrderExportData
		_ = json.Unmarshal(bytes, &info)
		times, _ := time.Parse(time.DateTime, info.Time)
		fileInfoList = append(fileInfoList, config.FileInfoStatus{FileInfo: fileInfo, Status: info.Status, Time: times})
	}
	// Newest first by the time recorded inside the status file.
	sort.Slice(fileInfoList, func(i, j int) bool {
		return fileInfoList[i].Time.After(fileInfoList[j].Time)
	})
	return fileInfoList
}
// InterfaceSliceToStringSlice converts a []interface{} whose elements are
// all strings into a []string; any non-string element aborts the whole
// conversion with an error.
func InterfaceSliceToStringSlice(in []interface{}) ([]string, error) {
	var out []string
	for _, item := range in {
		str, ok := item.(string)
		if !ok {
			return nil, fmt.Errorf("non-string value found in slice: %v", item)
		}
		out = append(out, str)
	}
	return out, nil
}

25
pkg/img.go Normal file
View File

@ -0,0 +1,25 @@
package pkg
import (
"bytes"
)
// DetectImageFormat sniffs the magic bytes at the start of data and
// returns a matching file extension; unknown formats default to ".jpg".
func DetectImageFormat(data []byte) string {
	switch {
	case bytes.HasPrefix(data, []byte{0xFF, 0xD8}): // JPEG
		return ".jpeg"
	case bytes.HasPrefix(data, []byte{0x89, 0x50, 0x4E, 0x47, 0x0D, 0x0A, 0x1A, 0x0A}): // PNG
		return ".png"
	case bytes.HasPrefix(data, []byte{0x47, 0x49, 0x46, 0x38}): // GIF
		return ".gif"
	// Add detection for further image formats here...
	default:
		return ".jpg"
	}
}

// GetFormat detects the image extension for body by sniffing its leading
// bytes (10 bytes cover every signature DetectImageFormat knows).
// Fix: the original sliced body[:10] unconditionally and panicked on
// bodies shorter than 10 bytes; short bodies are now handled safely.
func GetFormat(body []byte) string {
	n := 10
	if len(body) < n {
		n = len(body)
	}
	return DetectImageFormat(body[:n])
}

88
pkg/key_transcode.go Normal file
View File

@ -0,0 +1,88 @@
package pkg
import (
"fmt"
"math/big"
"strings"
)
// BASE is the 58-symbol encoding alphabet: upper/lower letters minus
// I, O, i, o, plus the digits 0-9. BASE[0] ("A") doubles as the padding
// (zero) symbol for EnBase/DeBase.
var BASE = []string{
	"A", "B", "C", "D", "E", "F", "G", "H", "J", "K", "L", "M", "N", "P", "Q", "R", "S", "T", "U", "V", "W", "X", "Y", "Z",
	"a", "b", "c", "d", "e", "f", "g", "h", "j", "k", "l", "m", "n", "p", "q", "r", "s", "t", "u", "v", "w", "x", "y", "z",
	"0", "1", "2", "3", "4", "5", "6", "7", "8", "9",
}

// Converter encodes/decodes arbitrary-precision integers in the BASE
// alphabet (base = len(base), i.e. 58).
type Converter struct {
	base      []string // symbol table, index == digit value
	baseCount *big.Int // len(base) as a big.Int, cached for arithmetic
}

// NewConverter builds a Converter over the package-level BASE alphabet.
func NewConverter() *Converter {
	c := &Converter{
		base:      BASE,
		baseCount: big.NewInt(int64(len(BASE))),
	}
	return c
}
// DeBase decodes an id written in the 58-symbol alphabet back into a
// big.Int: padding symbols (the alphabet's zero, "A") are stripped from
// the left, the remainder is reversed so index i carries weight base^i,
// and the digit values are accumulated.
func (c *Converter) DeBase(id string) *big.Int {
	// Symbol -> digit-value lookup for the alphabet.
	dedicate := make(map[string]int)
	for i, v := range c.base {
		dedicate[v] = i
	}
	// Drop the left padding added by EnBase, then reverse so the loop can
	// walk least-significant digit first.
	id = strings.TrimLeft(id, c.base[0])
	id = reverseString(id)
	v := big.NewInt(0)
	bigBaseCount := new(big.Int).Set(c.baseCount)
	bigIndex := new(big.Int)
	for i, char := range id {
		index := dedicate[string(char)]
		bigIndex.SetInt64(int64(index))
		// v += digit * base^i
		pow := new(big.Int).Exp(bigBaseCount, big.NewInt(int64(i)), nil)
		mul := new(big.Int).Mul(bigIndex, pow)
		v = new(big.Int).Add(v, mul)
	}
	return v
}
// EnBase encodes num into the 58-symbol alphabet, left-padding with the
// zero symbol ("A") up to `pad` digits. format==1 joins the digits into a
// plain string; any other value returns fmt.Sprintf("%v") of the digit
// slice (bracketed, space-separated).
// The caller's big.Int is never mutated: the local `num` is rebound to
// freshly allocated quotients on every iteration.
func (c *Converter) EnBase(num *big.Int, pad int, format int) string {
	arr := make([]string, 0)
	zero := big.NewInt(0)
	bigBaseCount := new(big.Int).Set(c.baseCount)
	// Emit digits least-significant first via repeated mod/div.
	for num.Cmp(zero) != 0 {
		mod := new(big.Int).Mod(num, bigBaseCount)
		arr = append(arr, c.base[mod.Int64()])
		num = new(big.Int).Div(num, bigBaseCount)
	}
	// Pad (still in reversed order) so the final string has >= pad digits.
	for len(arr) < pad {
		arr = append(arr, c.base[0])
	}
	reverseStringSlice(arr)
	if format == 1 {
		return strings.Join(arr, "")
	} else {
		return fmt.Sprintf("%v", arr)
	}
}
// reverseString returns s with its runes in reverse order (UTF-8 safe).
func reverseString(s string) string {
	runes := []rune(s)
	n := len(runes)
	out := make([]rune, n)
	for i, r := range runes {
		out[n-1-i] = r
	}
	return string(out)
}
// reverseStringSlice reverses s in place.
func reverseStringSlice(s []string) {
	for left, right := 0, len(s)-1; left < right; left, right = left+1, right-1 {
		s[left], s[right] = s[right], s[left]
	}
}

154
pkg/log.go Normal file
View File

@ -0,0 +1,154 @@
package pkg
import (
"encoding/json"
"fmt"
"log"
"os"
"time"
)
// ErrLog mirrors errContent to the standard logger and appends a
// timestamped line to runtime/err/<date> under the working directory.
// Fixes: failures are returned to the caller instead of terminating the
// whole process via log.Fatal (the original behavior — killing the
// service because a *log line* could not be written); and the message is
// no longer passed to Printf as a format string, which would corrupt any
// content containing '%' verbs.
func ErrLog(errContent string) error {
	log.Print(errContent)
	logFile, err := errLogFile()
	if err != nil {
		return err
	}
	file, err := os.OpenFile(logFile, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = file.WriteString(fmt.Sprintf("\n【%s】%s\n", time.Now().Format(time.DateTime), errContent))
	return err
}
// ProcessLog mirrors processInfo to the standard logger and appends a
// timestamped line to runtime/process/<date> under the working directory.
// Fixes: failures are returned to the caller instead of terminating the
// process via log.Fatal (the original behavior), and the message is no
// longer misused as a Printf format string.
func ProcessLog(processInfo string) error {
	log.Print(processInfo)
	processPath, err := processLogFile()
	if err != nil {
		return err
	}
	file, err := os.OpenFile(processPath, os.O_APPEND|os.O_CREATE|os.O_WRONLY, 0644)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = file.WriteString(fmt.Sprintf("\n【%s】%s\n", time.Now().Format(time.DateTime), processInfo))
	return err
}
// MissionLog persists task progress as json ({process, file_name, status,
// time}) to the per-task mission file, replacing the previous content.
// status becomes 1 (done) when process reaches 100. An empty timeStr keeps
// the time recorded in the previous snapshot.
// Fixes: the previous file was read *after* opening it with O_TRUNC, so
// the preserved "time" was always lost — it is now read before
// truncation; marshal errors are checked; and failures are returned
// instead of killing the process via log.Fatal.
func MissionLog(adminIdStr string, sysName string, jobName string, taskIdStr string, excelName string, process int, timeStr string) error {
	missionPath, err := MissionLogFile(adminIdStr, sysName, jobName, taskIdStr)
	if err != nil {
		return err
	}
	// Read the previous record BEFORE truncating, so an empty timeStr can
	// fall back to the previously stored time. Missing/corrupt files just
	// leave info at its zero value (best effort, as before).
	var info OrderExportData
	if bytes, err := os.ReadFile(missionPath); err == nil {
		_ = json.Unmarshal(bytes, &info)
	}
	status := 0
	if process == 100 {
		status = 1
	}
	nameMap := map[string]interface{}{
		"process":   process,
		"file_name": excelName,
		"status":    status,
	}
	if timeStr == "" {
		nameMap["time"] = info.Time
	} else {
		nameMap["time"] = timeStr
	}
	processInfo, err := json.Marshal(nameMap)
	if err != nil {
		return err
	}
	file, err := os.OpenFile(missionPath, os.O_RDWR|os.O_TRUNC|os.O_CREATE, 0766)
	if err != nil {
		return err
	}
	defer file.Close()
	_, err = file.Write(processInfo)
	return err
}
// MissionLogFile returns the full path of the mission progress file for
// one task, ensuring its parent directory exists.
func MissionLogFile(adminIdStr string, sysName string, jobName string, taskIdStr string) (string, error) {
	dir, err := MissionLogPath(adminIdStr, sysName, jobName)
	if err != nil {
		return "", err
	}
	return dir + "/" + taskIdStr, nil
}
// MissionLogPath ensures and returns runtime/<sys>/<job>/<admin> under
// the working directory.
func MissionLogPath(adminIdStr string, sysName string, jobName string) (string, error) {
	base, err := runtimePath()
	if err != nil {
		return "", err
	}
	dir := fmt.Sprintf("%s/%s/%s/%s", base, sysName, jobName, adminIdStr)
	if err := CheckDir(dir); err != nil {
		return "", err
	}
	return dir, nil
}
// errLogFile ensures runtime/err/ exists and returns the path of today's
// error log file (named by date).
func errLogFile() (string, error) {
	base, err := runtimePath()
	if err != nil {
		return "", err
	}
	dir := base + "/err"
	if err := CheckDir(dir); err != nil {
		return "", err
	}
	return dir + "/" + time.Now().Format(time.DateOnly), nil
}
// processLogFile ensures runtime/process/ exists and returns the path of
// today's progress log file (named by date).
func processLogFile() (string, error) {
	base, err := runtimePath()
	if err != nil {
		return "", err
	}
	dir := base + "/process"
	if err := CheckDir(dir); err != nil {
		return "", err
	}
	return dir + "/" + time.Now().Format(time.DateOnly), nil
}
// runtimePath returns <cwd>/runtime, the root directory for all log output.
func runtimePath() (string, error) {
	wd, err := os.Getwd()
	return fmt.Sprintf("%s/runtime", wd), err
}
// CheckDir guarantees that path exists as a directory, creating the whole
// chain when absent. Stat errors other than "not exist" (e.g. permission
// problems) are returned unchanged.
func CheckDir(path string) error {
	_, err := os.Stat(path)
	switch {
	case err == nil:
		return nil
	case os.IsNotExist(err):
		return os.MkdirAll(path, os.ModePerm)
	default:
		return err
	}
}

61
pkg/path.go Normal file
View File

@ -0,0 +1,61 @@
package pkg
import (
"fmt"
"os"
"path"
"regexp"
"runtime"
)
// GetRootDir returns the project root — the parent of this source file's
// directory — with a trailing slash, derived via runtime.Caller.
func GetRootDir() string {
	_, filename, _, _ := runtime.Caller(0)
	return path.Dir(path.Dir(filename)) + "/"
}
// GetPath ensures <path>/<pathType>/ exists and returns it with a
// trailing slash; creation failures panic (startup-time helper).
// NOTE(review): existence is probed by CheckOrCreatePathOrFile relative
// to GetRootDir(), not relative to `path` itself — confirm intended.
func GetPath(path string, pathType string) string {
	directory := fmt.Sprintf("%s/%s/", path, pathType)
	err := CheckOrCreatePathOrFile(directory)
	if err != nil {
		panic(err)
	}
	return directory
}
// MissionPathFile returns <cwd>/zip/<taskId>. The error result is always
// nil and is kept only for call-site compatibility.
func MissionPathFile(taskIdStr string) (string, error) {
	wd, _ := os.Getwd()
	return fmt.Sprintf("%s/zip/%s", wd, taskIdStr), nil
}
// CheckOrCreatePathOrFile creates directoryOrFile when absent: paths whose
// final segment contains a dot are created as empty files, anything else
// as a directory chain (0755).
// NOTE(review): the existence probe stats GetRootDir()+directoryOrFile,
// but creation uses directoryOrFile verbatim — for absolute or
// cwd-relative inputs these are different locations; confirm which is
// intended before changing either side.
func CheckOrCreatePathOrFile(directoryOrFile string) error {
	_, err := os.Stat(GetRootDir() + directoryOrFile)
	if err != nil {
		if os.IsNotExist(err) {
			// Heuristic: treat a dotted final segment as a file name.
			filePattern := `^.*\.[^\\/]*$`
			fileRe := regexp.MustCompile(filePattern)
			isFile := fileRe.MatchString(directoryOrFile)
			if isFile {
				file, err := os.Create(directoryOrFile)
				if err != nil {
					return err
				}
				defer file.Close()
			} else {
				err = os.MkdirAll(directoryOrFile, 0755)
				if err != nil {
					return err
				}
			}
		} else {
			return err
		}
	}
	return nil
}
// TreeList is a recursive directory listing node: the directory path, the
// plain files directly inside it, and its subdirectories.
type TreeList struct {
	Path     string      `json:"path"`
	File     []string    `json:"file"`
	Children []*TreeList `json:"children"`
}

39
pkg/response.go Normal file
View File

@ -0,0 +1,39 @@
package pkg
import (
"excel_export/pkg/e"
"fmt"
"github.com/gin-gonic/gin"
"net/http"
)
// Gin wraps a request context with the JSON response helpers below.
type Gin struct {
	C *gin.Context
}

// ResponseStruct is the uniform JSON envelope of every API response:
// a business code (see pkg/e), its message and the payload.
type ResponseStruct struct {
	Code int         `json:"code"`
	Msg  string      `json:"msg"`
	Data interface{} `json:"data"`
}
// Success writes HTTP 200 with business code SUCCESS and the payload.
func (g *Gin) Success(data interface{}) {
	body := ResponseStruct{
		Code: e.SUCCESS,
		Msg:  e.GetMsg(e.SUCCESS),
		Data: data,
	}
	g.C.JSON(http.StatusOK, body)
}
// Error aborts the request with HTTP 400, the given business code, and
// its message suffixed with extraInfo.
func (g *Gin) Error(code int, extraInfo string) {
	body := ResponseStruct{
		Code: code,
		Msg:  fmt.Sprintf("%s%s", e.GetMsg(code), extraInfo),
		Data: "",
	}
	g.C.AbortWithStatusJSON(http.StatusBadRequest, body)
}
// Response wraps a gin context in the Gin response helper.
func Response(c *gin.Context) *Gin {
	return &Gin{C: c}
}

198
router/api/v1/export.go Normal file
View File

@ -0,0 +1,198 @@
package v1
import (
"encoding/json"
"excel_export/biz/config"
"excel_export/cmd/cmd"
"excel_export/impl"
"excel_export/pkg"
"excel_export/pkg/e"
"fmt"
"github.com/gin-gonic/gin"
"math"
"os"
"strconv"
"time"
)
// Export launches an asynchronous export task for <sys>/<job> and
// immediately responds with the generated task id; progress is tracked
// via the mission log and queried through TaskProcess.
// Fixes: two leftover `if err != nil` blocks re-checked an err that is
// provably nil after the end_time parse (dead code from a removed sql
// validation step), and the request-body local shadowed its type name.
func Export(c *gin.Context) {
	var query impl.OrderJsonQuery
	sysName := c.Param("sys")
	jobName := c.Param("job")
	adminId := c.Query("admin_id")
	if err := c.ShouldBindJSON(&query); err != nil {
		pkg.Response(c).Error(e.INVALID_PARAMS, "参数错误:"+err.Error())
		return
	}
	path, _ := os.Getwd()
	conf := config.LoadConfig(path + "/config")
	ee := cmd.NewCsv(conf)
	begin, err := time.Parse(time.DateTime, query.BeginTime)
	if err != nil {
		pkg.Response(c).Error(e.INVALID_PARAMS, "begin_time参数错误:"+err.Error())
		return
	}
	end, err := time.Parse(time.DateTime, query.EndTime)
	if err != nil {
		pkg.Response(c).Error(e.INVALID_PARAMS, "end_time参数错误:"+err.Error())
		return
	}
	// Hand back the task id right away; the export runs detached.
	taskIdStr := pkg.GetTaskId()
	pkg.Response(c).Success(taskIdStr)
	go func() {
		// Recover so a worker panic cannot kill the server.
		defer func() {
			if r := recover(); r != nil {
				fmt.Printf("error: %v\n", r)
			}
		}()
		if err := ee.ExportMarket(adminId, sysName, jobName, begin, end, query.Slice, query.Condition, taskIdStr); err != nil {
			pkg.ErrLog("订单导出错误:" + err.Error())
		}
	}()
}
// Download streams the finished zip archive for a task to the client.
// Fixes: the os.ReadFile error was ignored and the unchecked
// info["file_name"].(string) assertion panicked whenever the mission file
// was missing or lacked that key; both now produce a MISSION_NOT_FOUND
// response instead.
func Download(c *gin.Context) {
	sysName := c.Param("sys")
	jobName := c.Param("job")
	taskId := c.Param("task_id")
	adminId := c.Query("admin_id")
	file, err := pkg.MissionLogFile(adminId, sysName, jobName, taskId)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到相关信息:"+err.Error())
		return
	}
	bytes, err := os.ReadFile(file)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到相关信息:"+err.Error())
		return
	}
	info := make(map[string]interface{})
	_ = json.Unmarshal(bytes, &info)
	name, ok := info["file_name"].(string)
	if !ok {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到相关信息:")
		return
	}
	path, _ := os.Getwd()
	conf := config.LoadConfig(path + "/config")
	ee := cmd.NewCsv(conf)
	ee.TaskIdStr = taskId
	ee.JobName = jobName
	ee.SysName = sysName
	ee.Name = name
	c.File(ee.ZipFile())
}
// TaskProcess returns the raw progress json recorded for one task.
func TaskProcess(c *gin.Context) {
	sysName := c.Param("sys")
	jobName := c.Param("job")
	taskId := c.Param("task_id")
	adminId := c.Query("admin_id")
	file, err := pkg.MissionLogFile(adminId, sysName, jobName, taskId)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到相关信息:"+err.Error())
		return
	}
	content, err := os.ReadFile(file)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "任务进度获取失败:"+err.Error())
		return
	}
	pkg.Response(c).Success(string(content))
}
// AllTaskProcess returns one page of task progress records for
// <sys>/<job>, newest first.
// Fixes: malformed paging params previously paniced (entries[begin:end]
// with begin past len, negative indices from page=0) and num=0 divided by
// zero in the LastPage computation; the per-entry os.Stat result was also
// used without checking its error.
func AllTaskProcess(c *gin.Context) {
	sysName := c.Param("sys")
	jobName := c.Param("job")
	aminId := c.Query("admin_id")
	path, err := pkg.MissionLogPath(aminId, sysName, jobName)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到项目文件:"+err.Error())
		return
	}
	entries := pkg.SortFileWithStatus(path)
	count := len(entries)
	pageInt, _ := strconv.ParseInt(c.Query("page"), 10, 64)
	numInt, _ := strconv.ParseInt(c.Query("num"), 10, 64)
	// Clamp paging parameters to safe values.
	if pageInt < 1 {
		pageInt = 1
	}
	if numInt < 1 {
		numInt = 10 // sane default page size for missing/zero num
	}
	begin := (pageInt - 1) * numInt
	if begin > int64(count) {
		begin = int64(count)
	}
	entEnd := begin + numInt
	if entEnd > int64(count) {
		entEnd = int64(count)
	}
	var data []map[string]interface{}
	for _, entry := range entries[begin:entEnd] {
		if entry.FileInfo == nil {
			break
		}
		if entry.IsDir() {
			continue
		}
		info := make(map[string]interface{})
		info["task_id"] = entry.Name()
		file := fmt.Sprintf("%s/%s", path, entry.Name())
		bytes, _ := os.ReadFile(file)
		_ = json.Unmarshal(bytes, &info)
		if st, err := os.Stat(file); err == nil {
			info["update_time"] = st.ModTime().Format(time.DateTime)
		}
		data = append(data, info)
	}
	res := config.ResPage{
		Page:     int(pageInt),
		PageSize: int(numInt),
		Total:    count,
		Data:     data,
		LastPage: int(math.Ceil(float64(count) / float64(numInt))),
	}
	pkg.Response(c).Success(res)
}
// DelTask removes both the zip output directory and the mission log entry
// for one task, responding with the task id on success.
func DelTask(c *gin.Context) {
	sysName := c.Param("sys")
	jobName := c.Param("job")
	taskId := c.Param("task_id")
	adminId := c.Query("admin_id")
	zipDir, err := pkg.MissionPathFile(taskId)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到任务信息:"+err.Error())
		return
	}
	if err := os.RemoveAll(zipDir); err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "删除失败:"+err.Error())
		return
	}
	logFile, err := pkg.MissionLogFile(adminId, sysName, jobName, taskId)
	if err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "未找到任务信息:"+err.Error())
		return
	}
	if err := os.RemoveAll(logFile); err != nil {
		pkg.Response(c).Error(e.MISSION_NOT_FOUND, "删除失败:"+err.Error())
		return
	}
	pkg.Response(c).Success(taskId)
}

25
router/app.go Normal file
View File

@ -0,0 +1,25 @@
package router
import (
v1 "excel_export/router/api/v1"
"github.com/gin-gonic/gin"
)
// App registers every export API route on the gin engine.
func App(r *gin.Engine) {
	r.Use(gin.Logger()) // request logging to stdout
	r.Use(gin.Recovery())
	export := r.Group("/export/v1/")
	// Task management endpoints.
	export.POST(":sys/:job", v1.Export)
	export.GET(":sys/:job/:task_id", v1.Download)
	export.GET("process/:sys/:job/:task_id", v1.TaskProcess)
	export.GET("process/:sys/:job", v1.AllTaskProcess)
	// NOTE(review): deletion is exposed as a POST on a pattern one segment
	// longer than the create route — confirm this is the intended verb.
	export.POST(":sys/:job/:task_id", v1.DelTask)
}

11
run.sh Executable file
View File

@ -0,0 +1,11 @@
#!/bin/bash
# Rebuild and (re)launch the export service container from scratch.
PROJECT="order-export"
VERSION="latest"
PORT="3030"
# Tear down any previous container/image. The && chain short-circuits when
# the container does not exist, but the script (no `set -e`) continues.
docker stop "$PROJECT" && docker rm "$PROJECT" && docker rmi "${PROJECT}:${VERSION}"
# Use ${} to avoid ambiguous variable expansion when the variable name is
# immediately followed by other characters.
docker build -t "${PROJECT}:${VERSION}" .
docker run -it -p "${PORT}:${PORT}" --name "$PROJECT" "${PROJECT}:${VERSION}"

BIN
server Executable file

Binary file not shown.