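// Package main implements a command-line exporter for Excel (.xlsx) configuration
// tables: it reads every table directory under -in, writes one JSON file per table
// to -out (optionally filtered by the "server"/"client" tag and pretty-printed with
// -format), and can additionally generate Go data structs (-go) and C# data classes
// (-cs) for the exported tables.
//
// A typical invocation (an illustrative example, not a fixed command) might look like:
//
//	go run . -in ./config -out ./json -tag server -go ./data -clear -format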
package main

import (
	"bytes"
	"encoding/json"
	"flag"
	"fmt"
	"github.com/oylshe1314/framework/errors"
	"github.com/oylshe1314/framework/log"
	"github.com/oylshe1314/framework/util"
	"github.com/xuri/excelize/v2"
	"os"
	"path"
	"path/filepath"
	"reflect"
	"sort"
	"strconv"
	"strings"
	"time"
)

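// codeField describes one exported column of a configuration table: its column
// order, source name, export tags, description, the raw type declared in the sheet,
// and the Go/C# types plus the UpperCamelCase name used in generated code.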
type codeField struct {
	sort uint32
	name string
	tags string
	desc string

	sourceType string
	golangType string
	csharpType string
	formatName string
}

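// goTypeMap and csTypeMap translate the type names written in a sheet's type row
// into the corresponding Go and C# types; date, time, and datetime columns are
// exported as integer values, and Vector3 becomes a fixed 3-element array.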
var goTypeMap = map[string]string{
	"bool": "bool",
	"bool[]": "[]bool",
	"int": "int",
	"int[]": "[]int",
	"long": "int64",
	"long[]": "[]int64",
	"float": "float64",
	"float[]": "[]float64",
	"double": "float64",
	"double[]": "[]float64",
	"string": "string",
	"string[]": "[]string",
	"date": "int64",
	"date[]": "[]int64",
	"time": "int64",
	"time[]": "[]int64",
	"datetime": "int64",
	"datetime[]": "[]int64",
	"Vector3": "[3]float64",
	"Vector3[]": "[][3]float64",
}

var csTypeMap = map[string]string{
	"bool": "bool",
	"bool[]": "bool[]",
	"int": "int",
	"int[]": "int[]",
	"long": "long",
	"long[]": "long[]",
	"float": "float",
	"float[]": "float[]",
	"double": "double",
	"double[]": "double[]",
	"string": "string",
	"string[]": "string[]",
	"date": "long",
	"date[]": "long[]",
	"time": "long",
	"time[]": "long[]",
	"datetime": "long",
	"datetime[]": "long[]",
	"Vector3": "Vector3",
	"Vector3[]": "Vector3[]",
}

type exportResult struct {
	codeFields  map[string]*codeField
	jsonObjects []map[string]interface{}
}

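// printUsage replaces the default flag usage output so that the flags are listed
// in a fixed, documentation-friendly order rather than alphabetically.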
func printUsage() {
	var cmdLine = flag.CommandLine
	var order = []string{"in", "out", "tag", "go", "cs", "clear", "format"}

	fmt.Printf("Usage of %s:\n", os.Args[0])
	for _, s := range order {
		var f = cmdLine.Lookup(s)
		if f != nil {
			name, usage := flag.UnquoteUsage(f)
			if len(name) > 0 {
				fmt.Printf(" -%s %s\n", f.Name, name)
				fmt.Printf(" %s\n\n", usage)
			} else {
				fmt.Printf(" -%s %s\n\n", f.Name, usage)
			}
		}
	}
}

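// main parses the command-line flags, optionally clears old JSON output, then walks
// every sub-directory of -in as one configuration table, exports it to JSON, and
// finally generates the aggregate tables.go / Tables.cs files when -go / -cs are set.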
func main() {

	var clrJson, fmtJson bool
	var inDir, outDir, jsonTag, goDir, csDir string

	flag.StringVar(&inDir, "in", "", "Specify the input directory of configuration files")
	flag.StringVar(&outDir, "out", "", "Specify the output directory for the exported json files")
	flag.StringVar(&jsonTag, "tag", "", "Specify the tag 'server' or 'client' for the exported json files")
	flag.StringVar(&goDir, "go", "", "Specify an output directory if go code files should be generated")
	flag.StringVar(&csDir, "cs", "", "Specify an output directory if csharp code files should be generated")
	flag.BoolVar(&clrJson, "clear", false, "Specify whether to clear the output directory")
	flag.BoolVar(&fmtJson, "format", false, "Specify whether to format the output json files")

	flag.Usage = printUsage
	flag.Parse()

	if inDir == "" || outDir == "" || (jsonTag != "" && jsonTag != "server" && jsonTag != "client") {
		flag.Usage()
		return
	}

	des, err := os.ReadDir(outDir)
	if err != nil {
		log.DefaultLogger.Error(err)
		return
	}

	if clrJson {
		log.DefaultLogger.Info("Clearing the output directory...")
		for _, de := range des {
			if de.IsDir() {
				continue
			}

			if !strings.HasSuffix(de.Name(), ".json") {
				continue
			}

			err = os.Remove(path.Join(outDir, de.Name()))
			if err != nil {
				log.DefaultLogger.Error(err)
				return
			}
		}
	}

	dirs, err := os.ReadDir(inDir)
	if err != nil {
		log.DefaultLogger.Error(err)
		return
	}

	log.DefaultLogger.Info("Reading all configuration tables...")

	var b = time.Now().Unix()
	var tables []string
	var tableFiles = make(map[string][]string)
	for _, dir := range dirs {
		files, err := readFiles(inDir, dir)
		if err != nil {
			log.DefaultLogger.Error(err)
			return
		}

		tables = append(tables, dir.Name())
		tableFiles[dir.Name()] = files
	}

	//readTables(inDir)

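	// Export each table directory in turn: write its JSON file, and generate the
	// per-table Go / C# sources when the corresponding output directory was given.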
	var goTables []string
	var csTables []string
	for _, table := range tables {
		files := tableFiles[table]
		if files == nil {
			continue
		}

		table = util.UpperCamelCase(table)
		log.DefaultLogger.Infof("Exporting %s...", table)
		if len(files) == 0 {
			log.DefaultLogger.Warn("Empty file list")
			continue
		}

		result, err := exportFiles(inDir, files)
		if err != nil {
			log.DefaultLogger.Errorf("Export failed, %v", err)
			continue
		}

		if len(result.jsonObjects) == 0 {
			log.DefaultLogger.Warn("Empty object list")
			continue
		}

		err = writeJsonFile(table, outDir, jsonTag, fmtJson, result)
		if err != nil {
			log.DefaultLogger.Errorf("Write %s.json failed, %v", table, err)
			continue
		}

		if goDir != "" {
			err = writeGoTable(table, goDir, result)
			if err == nil {
				goTables = append(goTables, table)
				log.DefaultLogger.Infof("Done")
			} else {
				log.DefaultLogger.Errorf("Write %s.go failed, %v", table, err)
			}
		}

		if csDir != "" {
			log.DefaultLogger.Infof("Writing %s.cs...", table)
			err = writeCsTable(table, csDir, result)
			if err == nil {
				csTables = append(csTables, table)
				log.DefaultLogger.Infof("Done")
			} else {
				log.DefaultLogger.Errorf("Write %s.cs failed, %v", table, err)
			}
		}
	}

	if goDir != "" {
		log.DefaultLogger.Info("Generating go tables")
		err = writeGoTables(goTables, goDir)
		if err == nil {
			log.DefaultLogger.Infof("Done")
		} else {
			log.DefaultLogger.Errorf("Generate tables.go failed, %v", err)
		}
	}

	if csDir != "" {
		log.DefaultLogger.Info("Generating c-sharp tables")
		err = writeCsTables(csTables, csDir)
		if err == nil {
			log.DefaultLogger.Infof("Done")
		} else {
			log.DefaultLogger.Errorf("Generate Tables.cs failed, %v", err)
		}
	}

	var e = time.Now().Unix()
	log.DefaultLogger.Infof("All configuration tables export finished, time: %ds", e-b)
}

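// filterFields returns the code fields to export: all of them when jsonTag is empty,
// otherwise only the fields whose tag cell contains jsonTag, optionally sorted by
// their original column order.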
func filterFields(jsonTag string, doSort bool, result *exportResult) (fields []*codeField) {
	if jsonTag == "" {
		fields = util.MapValues(result.codeFields)
	} else {
		for _, field := range result.codeFields {
			if strings.Contains(field.tags, jsonTag) {
				fields = append(fields, field)
			}
		}
	}

	if doSort {
		sort.Slice(fields, func(i, j int) bool {
			return fields[i].sort < fields[j].sort
		})
	}
	return
}

// toStructSlice converts a slice of maps (i.e. a JSON object array) into a slice of
// pointers to a dynamically built struct.
// Because map keys have no fixed order and cannot be sorted during JSON serialization,
// the exported JSON files change from run to run, which easily causes conflicts when
// committing updates to version control.
// Struct fields, however, are serialized in a fixed order, so the JSON object maps are
// converted into a dynamic struct before serialization (!!! definitely a terrible idea),
// or alternatively a custom function can serialize the JSON string directly.
func toStructSlice(jsonTag string, result *exportResult) (any, error) {
	var fields = filterFields(jsonTag, true, result)
	if len(fields) == 0 {
		return nil, nil
	}

	var sfs []reflect.StructField
	for _, field := range fields {
		var tipe reflect.Type
		switch field.golangType {
		case "bool":
			tipe = reflect.TypeOf(true)
		case "[]bool":
			tipe = reflect.TypeOf([]bool{})
		case "int":
			tipe = reflect.TypeOf(int(0))
		case "[]int":
			tipe = reflect.TypeOf([]int{})
		case "int64":
			tipe = reflect.TypeOf(int64(0))
		case "[]int64":
			tipe = reflect.TypeOf([]int64{})
		case "float64":
			tipe = reflect.TypeOf(float64(0))
		case "[]float64":
			tipe = reflect.TypeOf([]float64{})
		case "string":
			tipe = reflect.TypeOf("")
		case "[]string":
			tipe = reflect.TypeOf([]string{})
		case "[3]float64":
			tipe = reflect.TypeOf([3]float64{})
		case "[][3]float64":
			tipe = reflect.TypeOf([][3]float64{})
		}
		sfs = append(sfs, reflect.StructField{Name: field.formatName, Type: tipe, Tag: reflect.StructTag(fmt.Sprintf(`json:"%s,omitempty"`, field.name))})
	}

	var spt = reflect.PointerTo(reflect.StructOf(sfs))
	var spsv = reflect.MakeSlice(reflect.SliceOf(spt), len(result.jsonObjects), len(result.jsonObjects))

	for si, jsonObject := range result.jsonObjects {
		var spv, err = util.NewReflectValueFromJson(jsonObject, spt)
		if err != nil {
			return nil, err
		}
		spsv.Index(si).Set(spv)
	}
	return spsv.Interface(), nil
}

// toJsonString is the hand-written serializer: it writes the JSON array directly so
// that the fields of every object appear in their sorted column order.
func toJsonString(jsonTag string, result *exportResult) ([]byte, error) {
	var fields = filterFields(jsonTag, true, result)
	if len(fields) == 0 {
		return nil, nil
	}

	var buf bytes.Buffer

	buf.WriteString("[")
	for oi, jo := range result.jsonObjects {
		if oi > 0 {
			buf.WriteString(",")
		}

		buf.WriteString("{")
		// Count the members actually written so the separator is only added between
		// present fields (an absent first field must not produce a leading comma).
		var written = 0
		for _, field := range fields {
			var jv = jo[field.name]
			if jv != nil {
				if written > 0 {
					buf.WriteString(",")
				}
				written++
				buf.WriteString("\"")
				buf.WriteString(field.name)
				buf.WriteString("\":")
				vs, err := json.Marshal(jv)
				if err != nil {
					return nil, err
				}

				buf.WriteString(string(vs))
			}
		}
		buf.WriteString("}")
	}
	buf.WriteString("]\n")

	return buf.Bytes(), nil
}

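// writeJsonFile serializes the exported objects of one table to <outDir>/<table>.json.
// The two commented-out strategies below are kept for reference: "one" filters the raw
// maps and lets encoding/json order the keys, "two" goes through toStructSlice, and the
// active "thr" path uses toJsonString to keep the field order stable.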
func writeJsonFile(table, outDir, jsonTag string, fmtJson bool, result *exportResult) error {

	//---------------------- one --------------------------
	//var newObjs []map[string]interface{}
	//if jsonTag == "" {
	//	newObjs = result.jsonObjects
	//} else {
	//	var fields = filterFields(jsonTag, true, result)
	//	if len(fields) == 0 {
	//		return nil
	//	}
	//
	//	for _, jsonObj := range result.jsonObjects {
	//		var newObj = map[string]interface{}{}
	//		for _, field := range fields {
	//			var v, ok = jsonObj[field.name]
	//			if ok {
	//				newObj[field.name] = v
	//			}
	//		}
	//		if len(newObj) > 0 {
	//			newObjs = append(newObjs, newObj)
	//		}
	//	}
	//}
	//
	//var jsonFile = filepath.Join(outDir, table+".json")
	//log.DefaultLogger.Infof("Writing %s...", jsonFile)
	//
	//file, err := os.OpenFile(jsonFile, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
	//if err != nil {
	//	return err
	//}
	//
	//defer file.Close()
	//
	//var encoder = json.NewEncoder(file)
	//if fmtJson {
	//	encoder.SetIndent("", " ")
	//}
	//
	//return encoder.Encode(newObjs)
	//-----------------------------------------------------

	//---------------------- two --------------------------
	//sps, err := toStructSlice(jsonTag, result)
	//if err != nil {
	//	return err
	//}
	//
	//if sps == nil {
	//	return nil
	//}
	//
	//var jsonFile = filepath.Join(outDir, table+".json")
	//log.DefaultLogger.Infof("Writing %s...", jsonFile)
	//
	//file, err := os.OpenFile(jsonFile, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
	//if err != nil {
	//	return err
	//}
	//
	//defer file.Close()
	//
	//var encoder = json.NewEncoder(file)
	//if fmtJson {
	//	encoder.SetIndent("", " ")
	//}
	//
	//return encoder.Encode(sps)
	//-----------------------------------------------------

	//---------------------- thr --------------------------
	js, err := toJsonString(jsonTag, result)
	if err != nil {
		return err
	}

	if js == nil {
		return nil
	}

	if fmtJson {
		var buf bytes.Buffer
		err = json.Indent(&buf, js, "", " ")
		if err != nil {
			return err
		}

		js = buf.Bytes()
	}

	var jsonFile = filepath.Join(outDir, table+".json")
	log.DefaultLogger.Infof("Writing %s...", jsonFile)

	file, err := os.OpenFile(jsonFile, os.O_WRONLY|os.O_TRUNC|os.O_CREATE, 0666)
	if err != nil {
		return err
	}

	defer file.Close()

	_, err = file.Write(js)
	if err != nil {
		return err
	}

	return nil
	//-----------------------------------------------------
}

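// writeGoTables generates <goDir>/tables.go, an aggregate struct that holds one field
// per exported table. For two hypothetical tables named Item and Skill the generated
// file would be shaped like:
//
//	package data
//
//	type tables struct {
//		Item  ItemTable
//		Skill SkillTable
//	}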
func writeGoTables(tables []string, goDir string) error {
	var tablesFile = filepath.Join(goDir, "tables.go")

	log.DefaultLogger.Infof("Writing %s...", tablesFile)
	var buf bytes.Buffer

	var maxTableNameLength = 0
	for _, table := range tables {
		if len(table) > maxTableNameLength {
			maxTableNameLength = len(table)
		}
	}

	var fmtStr = fmt.Sprintf("\t%%-%ds %%sTable\n", maxTableNameLength)

	buf.WriteString("package data\n")
	buf.WriteRune('\n')
	buf.WriteString("type tables struct {\n")
	for _, table := range tables {
		buf.WriteString(fmt.Sprintf(fmtStr, table, table))
	}
	buf.WriteString("}\n")

	return os.WriteFile(tablesFile, buf.Bytes(), 0666)
}

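// writeGoTable generates <goDir>/<table_name>.go with the data struct for one table,
// an id() accessor, and a <Table>Table type embedding a generic table[K, *T] type that
// the target data package is assumed to define. For a hypothetical Item table with an
// int id column and a string name column the output would be shaped like:
//
//	package data
//
//	type Item struct {
//		Id   int    `json:"id"`   //identifier
//		Name string `json:"name"` //display name
//	}
//
//	func (data *Item) id() int {
//		return data.Id
//	}
//
//	type ItemTable struct {
//		table[int, *Item]
//	}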
func writeGoTable(table, goDir string, result *exportResult) error {
	var goFile = filepath.Join(goDir, util.LowerSnakeCase(table)+".go")
	log.DefaultLogger.Infof("Writing %s...", goFile)

	var buf bytes.Buffer

	var structName = table

	var idFieldName = "id"
	var idField *codeField
	var maxNameLength = 0
	var maxTypeLength = 0
	var maxSrcNameLength = 0
	var serverFields []*codeField
	for _, field := range result.codeFields {
		if !strings.Contains(field.tags, "server") {
			continue
		}

		if len(field.formatName) > maxNameLength {
			maxNameLength = len(field.formatName)
		}
		if len(field.golangType) > maxTypeLength {
			maxTypeLength = len(field.golangType)
		}
		if len(field.name) > maxSrcNameLength {
			maxSrcNameLength = len(field.name)
		}
		if field.name == idFieldName {
			idField = field
			switch idField.golangType {
			case "int":
			case "int64":
			case "string":
			default:
				return errors.Error("the type of the id field can only be int, long, or string")
			}
		}

		serverFields = append(serverFields, field)
	}

	if len(serverFields) == 0 {
		return errors.Error("the table does not have any fields for server")
	}

	if idField == nil {
		return errors.Error("the table should have an id field")
	}

	sort.Slice(serverFields, func(i, j int) bool {
		return serverFields[i].sort < serverFields[j].sort
	})

	var fmtStr = fmt.Sprintf("\t%%-%ds %%-%ds `json:\"%%s\"`", maxNameLength, maxTypeLength)

	buf.WriteString("package data\n")
	buf.WriteRune('\n')
	buf.WriteString(fmt.Sprintf("type %s struct {\n", structName))
	for _, field := range serverFields {
		buf.WriteString(fmt.Sprintf(fmtStr, field.formatName, field.golangType, field.name))
		var spaces = maxSrcNameLength - len(field.name) + 1
		for i := 0; i < spaces; i++ {
			buf.WriteByte(' ')
		}
		buf.WriteString(fmt.Sprintf("//%s\n", field.desc))
	}
	buf.WriteString("}\n")
	buf.WriteRune('\n')

	buf.WriteString(fmt.Sprintf("func (data *%s) id() %s {\n", structName, idField.golangType))
	buf.WriteString(fmt.Sprintf("\treturn data.%s\n", idField.formatName))
	buf.WriteString("}\n")
	buf.WriteRune('\n')

	buf.WriteString(fmt.Sprintf("type %sTable struct {\n", structName))
	buf.WriteString(fmt.Sprintf("\ttable[%s, *%s]\n", idField.golangType, structName))
	buf.WriteString("}\n")

	return os.WriteFile(goFile, buf.Bytes(), 0666)
}

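// writeCsTables generates <csDir>/Tables.cs, a static Tables class exposing one
// <Name>Table property per exported table that constructs the table on first access.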
func writeCsTables(tables []string, csDir string) error {
	var tablesFile = filepath.Join(csDir, "Tables.cs")
	log.DefaultLogger.Infof("Writing %s...", tablesFile)

	var buf bytes.Buffer

	buf.WriteString("namespace DataTables\n")
	buf.WriteString("{\n")
	buf.WriteString("\tpublic static class Tables\n")
	buf.WriteString("\t{\n")
	for _, table := range tables {
		buf.WriteString(fmt.Sprintf("\t\tpublic static %sTable %s\n", table, table))
		buf.WriteString("\t\t{\n")
		buf.WriteString("\t\t\tprivate set { }\n")
		buf.WriteString("\t\t\tget\n")
		buf.WriteString("\t\t\t{\n")
		buf.WriteString(fmt.Sprintf("\t\t\t\tif (%s == null)\n", table))
		buf.WriteString("\t\t\t\t{\n")
		buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new %sTable();\n", table, table))
		buf.WriteString("\t\t\t\t}\n")
		buf.WriteString(fmt.Sprintf("\t\t\t\treturn %s;\n", table))
		buf.WriteString("\t\t\t}\n")
		buf.WriteString("\t\t}\n")
		buf.WriteString("\n")
	}
	buf.WriteString("\t}\n")
	buf.WriteString("}\n")

	return os.WriteFile(tablesFile, buf.Bytes(), 0666)
}

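// writeCsTable generates <csDir>/<Table>.cs for one table: a plain data class with the
// client-tagged fields and a <Table>Table class whose constructor embeds every exported
// row as a C# object initializer, so the client data is available without loading JSON
// at runtime.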
func writeCsTable(table, csDir string, result *exportResult) error {

	var structName = table
	var filename = util.UpperCamelCase(table) + ".cs"

	var idFieldName = "id"
	var idField *codeField
	var clientFields []*codeField
	for _, field := range result.codeFields {
		if !strings.Contains(field.tags, "client") {
			continue
		}

		if field.name == idFieldName {
			idField = field
			switch idField.csharpType {
			case "int":
			case "long":
			case "string":
			default:
				return errors.Error("the type of the id field can only be int, long, or string")
			}
		}

		clientFields = append(clientFields, field)
	}

	if len(clientFields) == 0 {
		return errors.Error("the table does not have any fields for client")
	}

	if idField == nil {
		return errors.Error("the table should have an id field")
	}

	sort.Slice(clientFields, func(i, j int) bool {
		return clientFields[i].sort < clientFields[j].sort
	})

	var buf bytes.Buffer

	buf.WriteString("using UnityEngine;\n")
	buf.WriteString("using System.Collections.Generic;\n")
	buf.WriteRune('\n')
	buf.WriteString("namespace DataTables\n")
	buf.WriteString("{\n")
	buf.WriteString(fmt.Sprintf("\tpublic class %s\n", structName))
	buf.WriteString("\t{\n")
	for i, field := range clientFields {
		if i > 0 {
			buf.WriteRune('\n')
		}
		buf.WriteString("\t\t/// <summary>\n")
		buf.WriteString(fmt.Sprintf("\t\t/// %s\n", field.desc))
		buf.WriteString("\t\t/// </summary>\n")
		buf.WriteString(fmt.Sprintf("\t\tpublic %s %s;\n", field.csharpType, field.formatName))
	}
	buf.WriteString("\t}\n")
	buf.WriteRune('\n')
	buf.WriteString(fmt.Sprintf("\tpublic class %sTable : Table<%s, %s>\n", structName, idField.csharpType, structName))
	buf.WriteString("\t{\n")
	//buf.WriteString(fmt.Sprintf("\t\tinternal static %sTable Instance = new %sTable();\n", structName, structName))
	buf.WriteString(fmt.Sprintf("\t\tinternal %sTable()\n", structName))
	buf.WriteString("\t\t{\n")
	buf.WriteString(fmt.Sprintf("\t\t\tL = new List<%s>()\n", structName))
	buf.WriteString("\t\t\t{\n")
	for _, jsonObject := range result.jsonObjects {
		var idValue = jsonObject[idField.name]
		if idValue == nil {
			return errors.Error("the table does not have an id value")
		}
		buf.WriteString(fmt.Sprintf("\t\t\t\tnew %s()\n", structName))
		buf.WriteString("\t\t\t\t{\n")
		for _, field := range clientFields {
			var fieldValue = jsonObject[field.name]
			if isDefaultValue(fieldValue) {
				continue
			}
			switch field.csharpType {
			case "bool":
				buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = %v,\n", field.formatName, fieldValue.(bool)))
			case "bool[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new bool[]{", field.formatName))
					for _, v := range ary {
						buf.WriteString(fmt.Sprintf("%v, ", v.(bool)))
					}
					buf.WriteString("},\n")
				}
			case "int":
				buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = %v,\n", field.formatName, fieldValue.(int)))
			case "int[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new int[]{", field.formatName))
					for _, v := range ary {
						buf.WriteString(fmt.Sprintf("%v, ", v.(int)))
					}
					buf.WriteString("},\n")
				}
			case "float":
				buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = %vf,\n", field.formatName, fieldValue.(float64)))
			case "float[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new float[]{", field.formatName))
					for _, v := range ary {
						buf.WriteString(fmt.Sprintf("%vf, ", v.(float64)))
					}
					buf.WriteString("},\n")
				}
			case "double":
				buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = %v,\n", field.formatName, fieldValue.(float64)))
			case "double[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new double[]{", field.formatName))
					for _, v := range ary {
						buf.WriteString(fmt.Sprintf("%v, ", v.(float64)))
					}
					buf.WriteString("},\n")
				}
			case "string":
				buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = \"%v\",\n", field.formatName, fieldValue.(string)))
			case "string[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new string[]{", field.formatName))
					for _, v := range ary {
						buf.WriteString(fmt.Sprintf("\"%v\", ", v.(string)))
					}
					buf.WriteString("},\n")
				}
			case "Vector3":
				var vv = fieldValue.([]interface{})
				switch len(vv) {
				case 2:
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new Vector3(%vf, %vf, 0f),\n", field.formatName, vv[0].(float64), vv[1].(float64)))
				case 3:
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new Vector3(%vf, %vf, %vf),\n", field.formatName, vv[0].(float64), vv[1].(float64), vv[2].(float64)))
				default:
					return errors.Errorf("the value of field '%s' is invalid for Vector3, id: %v", field.name, idValue)
				}
			case "Vector3[]":
				var ary = fieldValue.([]interface{})
				if len(ary) > 0 {
					buf.WriteString(fmt.Sprintf("\t\t\t\t\t%s = new Vector3[]{", field.formatName))
					for _, av := range ary {
						if av == nil {
							continue
						}
						var vv = av.([]interface{})
						if len(vv) > 0 {
							switch len(vv) {
							case 2:
								buf.WriteString(fmt.Sprintf("new Vector3(%vf, %vf, 0f), ", vv[0].(float64), vv[1].(float64)))
							case 3:
								buf.WriteString(fmt.Sprintf("new Vector3(%vf, %vf, %vf), ", vv[0].(float64), vv[1].(float64), vv[2].(float64)))
							default:
								return errors.Errorf("the value of field '%s' is invalid for Vector3, id: %v", field.name, idValue)
							}
						}
					}
					buf.WriteString("},\n")
				}
			}
		}
		buf.WriteString("\t\t\t\t},\n")
	}
	buf.WriteString("\t\t\t};\n")
	// Key the lookup dictionary by the id field's C# type so it matches Table<K, V>.
	buf.WriteString(fmt.Sprintf("\t\t\tM = new Dictionary<%s, %s>();\n", idField.csharpType, structName))
	buf.WriteString("\t\t\tforeach (var d in L)\n")
	buf.WriteString("\t\t\t{\n")
	buf.WriteString(fmt.Sprintf("\t\t\t\tM[d.%s] = d;\n", idField.formatName))
	buf.WriteString("\t\t\t}\n")
	buf.WriteString("\t\t}\n")
	buf.WriteString("\t}\n")
	buf.WriteString("}\n")

	return os.WriteFile(filepath.Join(csDir, filename), buf.Bytes(), 0666)
}

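// readFiles collects the .xlsx files of one table directory (recursing into
// sub-directories) and returns their paths relative to inDir, skipping hidden and
// temporary files whose names start with '.' or '~'.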
func readFiles(inDir string, dir os.DirEntry) ([]string, error) {
	var files []string

	var dirPath = filepath.Join(inDir, dir.Name())

	dirs, err := os.ReadDir(dirPath)
	if err != nil {
		return nil, err
	}

	for _, sub := range dirs {
		if sub.IsDir() {
			subFiles, err := readFiles(dirPath, sub)
			if err != nil {
				return nil, err
			}

			// The recursive call returns paths relative to dirPath, so prefix them
			// with this directory's name to keep every path relative to inDir.
			for _, subFile := range subFiles {
				files = append(files, filepath.Join(dir.Name(), subFile))
			}
		} else {
			if path.Ext(sub.Name()) != ".xlsx" {
				continue
			}
			if sub.Name()[0] == '.' || sub.Name()[0] == '~' {
				continue
			}
			files = append(files, filepath.Join(dir.Name(), sub.Name()))
		}
	}

	return files, nil
}

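// exportFiles reads every workbook of one table, merges all non-"help" sheets into a
// single exportResult, and verifies that the sheets share the same head structure.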
func exportFiles(inDir string, files []string) (*exportResult, error) {
	var result exportResult

	for _, file := range files {
		var filename = filepath.Join(inDir, file)
		log.DefaultLogger.Infof("Reading %s...", filename)
		f, err := excelize.OpenFile(filename)
		if err != nil {
			return nil, err
		}

		if f.SheetCount == 0 {
			return nil, errors.Errorf("empty document %s", filename)
		}

		for i := 0; i < f.SheetCount; i++ {
			var sheetName = f.GetSheetName(i)
			if strings.Contains(sheetName, "help") {
				continue
			}
			err = readSheet(f, sheetName, &result)
			if err != nil {
				return nil, err
			}
		}

		_ = f.Close()
	}
	return &result, nil
}

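// checkHeadStruct ensures that a sheet's header rows describe the same fields, types,
// and tags as the sheets that were already read for this table.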
func checkHeadStruct(types, fields, tags []string, codeObjects map[string]*codeField) error {
	for i, field := range fields {
		if !strings.Contains(tags[i], "server") && !strings.Contains(tags[i], "client") {
			continue
		}

		ff, ok := codeObjects[strings.TrimSpace(field)]
		if !ok {
			return errors.Errorf("inconsistent structures of multiple sheets for the same configuration table, unexpected field '%s'", field)
		}

		if strings.TrimSpace(types[i]) != ff.sourceType {
			return errors.Errorf("inconsistent type of the field '%s' in multiple sheets", field)
		}

		if tags[i] != ff.tags {
			return errors.Errorf("inconsistent tags of the field '%s' in multiple sheets", field)
		}
	}
	return nil
}

func getColumn(ci int) string {
	var col, _ = excelize.ColumnNumberToName(ci + 1)
	return col
}

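// readSheet parses one sheet into result. The expected layout is:
//
//	row 1: field descriptions
//	row 2: field types (keys of goTypeMap / csTypeMap)
//	row 3: field names
//	row 4: export tags ("server", "client", or both)
//	row 5+: data rows
//
// Columns whose tag contains neither "server" nor "client" are ignored, and values
// equal to their type's default are omitted from the exported JSON object.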
func readSheet(excelFile *excelize.File, sheetName string, result *exportResult) error {
	rows, err := excelFile.GetRows(sheetName)
	if err != nil {
		return err
	}

	if len(rows) == 0 {
		return nil
	}

	if len(rows) < 4 {
		return errors.Errorf("sheet '%s' format error, there are fewer than 4 header rows", sheetName)
	}

	var descs = rows[0]
	var types = rows[1]
	var fields = rows[2]
	var tags = rows[3]

	if len(types) != len(fields) || len(tags) != len(fields) {
		return errors.Errorf("sheet '%s' format error, inconsistent length of types, fields, and tags", sheetName)
	}

	if result.codeFields != nil {
		err = checkHeadStruct(types, fields, tags, result.codeFields)
		if err != nil {
			return errors.Errorf("sheet '%s' format error, %v", sheetName, err)
		}
	}

	rows = rows[4:]
	for ri, row := range rows {
		var object = make(map[string]interface{})
		for fi, field := range fields {
			if !strings.Contains(tags[fi], "server") && !strings.Contains(tags[fi], "client") {
				continue
			}

			var fieldName = strings.TrimSpace(field)
			var fieldType = strings.TrimSpace(types[fi])

			if fieldName == "" || fieldType == "" {
				continue
			}

			var srcType = strings.TrimSpace(fieldType)
			goType, ok := goTypeMap[srcType]
			if !ok {
				return errors.Errorf("sheet '%s' row %d column %s: cannot find the go type", sheetName, ri+5, getColumn(fi))
			}

			csType, ok := csTypeMap[srcType]
			if !ok {
				return errors.Errorf("sheet '%s' row %d column %s: cannot find the cs type", sheetName, ri+5, getColumn(fi))
			}

			var rowVal = ""
			if fi < len(row) {
				rowVal = row[fi]
			}

			value, err := readValue(rowVal, srcType)
			if err != nil {
				return errors.Errorf("sheet '%s' row %d column %s read value failed, %v", sheetName, ri+5, getColumn(fi), err)
			}

			if !isDefaultValue(value) {
				object[fieldName] = value
			}

			if result.codeFields == nil {
				result.codeFields = map[string]*codeField{}
			}

			if result.codeFields[fieldName] == nil {
				result.codeFields[fieldName] = &codeField{
					sort:       uint32(fi),
					name:       fieldName,
					tags:       tags[fi],
					desc:       descs[fi],
					sourceType: fieldType,
					golangType: goType,
					csharpType: csType,
					formatName: util.UpperCamelCase(fieldName),
				}
			}
		}
		if len(object) > 0 {
			result.jsonObjects = append(result.jsonObjects, object)
		}
	}
	return nil
}

func readValue(value, valType string) (interface{}, error) {
	return parseValue(value, valType)
}

func isDefaultValue(value interface{}) bool {
	// The numeric kinds are checked one by one: in a multi-type case the switch
	// variable keeps the interface type, and comparing it with the untyped constant 0
	// would only match an int, so int64 and float zeros would not be treated as defaults.
	switch v := value.(type) {
	case bool:
		return !v
	case int:
		return v == 0
	case int64:
		return v == 0
	case float32:
		return v == 0
	case float64:
		return v == 0
	case string:
	default:
		return value == nil
	}
	return false
}

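// parseValue converts one raw cell string into the value exported for the given source
// type. Array cells are comma-separated (Vector3[] uses '|' between vectors), date and
// datetime cells become Unix timestamps, and time cells become seconds since midnight,
// all interpreted in UTC+8. Unknown types fall back to plain strings.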
func parseValue(value, valType string) (interface{}, error) {
	switch valType {
	case "bool":
		return boolParser(value)
	case "bool[]":
		return splitValue(value, ",", boolParser)
	case "int":
		return intParser(value)
	case "int[]":
		return splitValue(value, ",", intParser)
	case "float":
		return doubleParser(value)
	case "float[]":
		return splitValue(value, ",", doubleParser)
	case "double":
		return doubleParser(value)
	case "double[]":
		return splitValue(value, ",", doubleParser)
	case "string":
		return stringParser(value)
	case "string[]":
		return splitValue(value, ",", stringParser)
	case "date":
		return dateParser(value)
	case "date[]":
		return splitValue(value, ",", dateParser)
	case "time":
		return timeParser(value)
	case "time[]":
		return splitValue(value, ",", timeParser)
	case "datetime":
		return datetimeParser(value)
	case "datetime[]":
		return splitValue(value, ",", datetimeParser)
	case "Vector3":
		return vector3Parser(value)
	case "Vector3[]":
		return splitValue(value, "|", vector3Parser)
	default:
		return stringParser(value)
	}
}

func splitValue(value, sep string, parser func(string) (interface{}, error)) (interface{}, error) {
	if value == "" {
		return nil, nil
	}

	var values []interface{}
	var items = strings.Split(value, sep)
	for _, item := range items {
		parsed, err := parser(item)
		if err != nil {
			return nil, err
		}
		if parsed != nil {
			values = append(values, parsed)
		}
	}
	if len(values) == 0 {
		return nil, nil
	}
	return values, nil
}

func boolParser(item string) (interface{}, error) {
	item = strings.TrimSpace(item)
	if item == "" {
		return false, nil
	}
	return strconv.ParseBool(item)
}

func intParser(item string) (interface{}, error) {
	item = strings.TrimLeft(strings.TrimSpace(item), "0")
	if item == "" {
		return 0, nil
	}
	return strconv.Atoi(item)
}

func doubleParser(item string) (interface{}, error) {
	item = strings.TrimLeft(strings.TrimSpace(item), "0")
	if item == "" {
		return float64(0), nil
	}
	if item[0] == '.' {
		if len(item) == 1 {
			return float64(0), nil
		} else {
			item = "0" + item
		}
	}
	return strconv.ParseFloat(item, 64)
}

func stringParser(item string) (interface{}, error) {
	return strings.TrimSpace(item), nil
}

func dateParser(item string) (interface{}, error) {
	if item == "" {
		return int64(0), nil
	}

	t, err := time.ParseInLocation(time.DateOnly, item, util.UTC8())
	if err != nil {
		return nil, err
	}

	return t.Unix(), nil
}

func timeParser(item string) (interface{}, error) {
	if item == "" {
		return int64(0), nil
	}

	t, err := time.ParseInLocation(time.TimeOnly, item, util.UTC8())
	if err != nil {
		return nil, err
	}

	// Seconds since midnight: hours and minutes are converted to seconds and added.
	return int64(3600*t.Hour() + 60*t.Minute() + t.Second()), nil
}

func datetimeParser(item string) (interface{}, error) {
	if item == "" {
		return int64(0), nil
	}

	t, err := time.ParseInLocation(time.DateTime, item, util.UTC8())
	if err != nil {
		return nil, err
	}

	return t.Unix(), nil
}

func vector3Parser(item string) (interface{}, error) {
	if item == "" {
		return nil, nil
	}

	vectorValue, err := splitValue(item, ",", doubleParser)
	if err != nil {
		return nil, err
	}

	// Pad missing components with float64 zeros and return the padded slice so the
	// value always has exactly three elements.
	var ary = vectorValue.([]interface{})
	for i := len(ary); i < 3; i++ {
		ary = append(ary, float64(0))
	}

	return ary, nil
}