From 415d51808c567885f82e0b877b9856226b0b56ad Mon Sep 17 00:00:00 2001 From: lbbniu Date: Fri, 16 Jun 2023 13:48:34 +0800 Subject: [PATCH 1/2] perf: refactor tars2go --- tars/protocol/res/Makefile | 9 +- tars/tools/tars2go/ast/ast.go | 243 +++++ tars/tools/tars2go/{ => gencode}/gen_go.go | 462 ++++------ tars/tools/tars2go/{lex.go => lexer/lexer.go} | 223 +---- tars/tools/tars2go/main.go | 55 +- tars/tools/tars2go/options/options.go | 79 ++ tars/tools/tars2go/parse.go | 865 ------------------ tars/tools/tars2go/parse/parse.go | 704 ++++++++++++++ tars/tools/tars2go/token/token.go | 143 +++ tars/tools/tars2go/utils/utils.go | 29 + tars/tools/tars2go/version.go | 4 - tars/tools/tars2go/version/version.go | 4 + 12 files changed, 1430 insertions(+), 1390 deletions(-) create mode 100644 tars/tools/tars2go/ast/ast.go rename tars/tools/tars2go/{ => gencode}/gen_go.go (77%) rename tars/tools/tars2go/{lex.go => lexer/lexer.go} (54%) mode change 100755 => 100644 create mode 100644 tars/tools/tars2go/options/options.go delete mode 100755 tars/tools/tars2go/parse.go create mode 100644 tars/tools/tars2go/parse/parse.go create mode 100644 tars/tools/tars2go/token/token.go create mode 100644 tars/tools/tars2go/utils/utils.go delete mode 100644 tars/tools/tars2go/version.go create mode 100644 tars/tools/tars2go/version/version.go diff --git a/tars/protocol/res/Makefile b/tars/protocol/res/Makefile index 71e54214..004f7352 100644 --- a/tars/protocol/res/Makefile +++ b/tars/protocol/res/Makefile @@ -1,9 +1,2 @@ -OS=$(shell uname -s) all: - tars2go -without-trace=true -add-servant=false -tarsPath github.com/TarsCloud/TarsGo/tars *.tars -ifeq ($(OS),Darwin) - sed -i '' 's|"endpointf"|"github.com/TarsCloud/TarsGo/tars/protocol/res/endpointf"|g' queryf/QueryF.tars.go -else - sed -i 's|"endpointf"|"github.com/TarsCloud/TarsGo/tars/protocol/res/endpointf"|g' queryf/QueryF.tars.go -endif - #override file + tars2go -without-trace=true -add-servant=false -tarsPath github.com/TarsCloud/TarsGo/tars -module github.com/TarsCloud/TarsGo/tars/protocol/res *.tars diff --git a/tars/tools/tars2go/ast/ast.go b/tars/tools/tars2go/ast/ast.go new file mode 100644 index 00000000..be145d24 --- /dev/null +++ b/tars/tools/tars2go/ast/ast.go @@ -0,0 +1,243 @@ +package ast + +import ( + "errors" + "strings" + + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/token" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/utils" +) + +// VarType contains variable type(token) +type VarType struct { + Type token.Type // basic type + Unsigned bool // whether unsigned + TypeSt string // custom type name, such as an enumerated struct,at this time Type=token.Name + CType token.Type // make sure which type of custom type is,token.Enum, token.Struct + TypeK *VarType // vector's member variable,the key of map + TypeV *VarType // the value of map + TypeL int64 // length of array +} + +// StructMember member struct. +type StructMember struct { + Tag int32 + Require bool + Type *VarType + Key string // after the uppercase converted key + OriginKey string // original key + Default string + DefType token.Type +} + +// StructMemberSorter When serializing, make sure the tags are ordered. +type StructMemberSorter []StructMember + +func (a StructMemberSorter) Len() int { return len(a) } +func (a StructMemberSorter) Swap(i, j int) { a[i], a[j] = a[j], a[i] } +func (a StructMemberSorter) Less(i, j int) bool { return a[i].Tag < a[j].Tag } + +// StructInfo record struct information. 
+type StructInfo struct { + Name string + OriginName string //original name + Mb []StructMember + DependModule map[string]bool + DependModuleWithJce map[string]string +} + +// ArgInfo record argument information. +type ArgInfo struct { + Name string + OriginName string //original name + IsOut bool + Type *VarType +} + +// FunInfo record function information. +type FunInfo struct { + Name string // after the uppercase converted name + OriginName string // original name + HasRet bool + RetType *VarType + Args []ArgInfo +} + +// InterfaceInfo record interface information. +type InterfaceInfo struct { + Name string + OriginName string // original name + Fun []FunInfo + DependModule map[string]bool + DependModuleWithJce map[string]string +} + +// EnumMember record member information. +type EnumMember struct { + Key string + Type int + Value int32 //type 0 + Name string //type 1 +} + +// EnumInfo record EnumMember information include name. +type EnumInfo struct { + Module string + Name string + OriginName string // original name + Mb []EnumMember +} + +// ConstInfo record const information. +type ConstInfo struct { + Type *VarType + Name string + OriginName string // original name + Value string +} + +// HashKeyInfo record hash key information. +type HashKeyInfo struct { + Name string + Member []string +} + +type ModuleInfo struct { + Source string + // proto file name(not include .tars) + ProtoName string + Name string + OriginName string + Include []string + + Struct []StructInfo + HashKey []HashKeyInfo + Enum []EnumInfo + Const []ConstInfo + Interface []InterfaceInfo + + // have parsed include file + IncModule []*ModuleInfo +} + +// Rename module +func (p *ModuleInfo) Rename(moduleUpper bool) { + p.OriginName = p.Name + if moduleUpper { + p.Name = utils.UpperFirstLetter(p.Name) + } +} + +// FindTNameType Looking for the true type of user-defined identifier +func (p *ModuleInfo) FindTNameType(tname string) (token.Type, string, string) { + for _, v := range p.Struct { + if p.Name+"::"+v.Name == tname { + return token.Struct, p.Name, p.ProtoName + } + } + + for _, v := range p.Enum { + if p.Name+"::"+v.Name == tname { + return token.Enum, p.Name, p.ProtoName + } + } + + for _, pInc := range p.IncModule { + ret, mod, protoName := pInc.FindTNameType(tname) + if ret != token.Name { + return ret, mod, protoName + } + } + // not find + return token.Name, p.Name, p.ProtoName +} + +func (p *ModuleInfo) FindEnumName(ename string, moduleCycle bool) (*EnumMember, *EnumInfo, error) { + if strings.Contains(ename, "::") { + vec := strings.Split(ename, "::") + if len(vec) >= 2 { + ename = vec[1] + } + } + var cmb *EnumMember + var cenum *EnumInfo + for ek, enum := range p.Enum { + for mk, mb := range enum.Mb { + if mb.Key != ename { + continue + } + if cmb == nil { + cmb = &enum.Mb[mk] + cenum = &p.Enum[ek] + } else { + return nil, nil, errors.New(ename + " name conflict [" + cenum.Name + "::" + cmb.Key + " or " + enum.Name + "::" + mb.Key) + } + } + } + var err error + for _, pInc := range p.IncModule { + if cmb == nil { + cmb, cenum, err = pInc.FindEnumName(ename, moduleCycle) + if err != nil { + return cmb, cenum, err + } + } else { + break + } + } + if cenum != nil && cenum.Module == "" { + if moduleCycle { + cenum.Module = p.ProtoName + "_" + p.Name + } else { + cenum.Module = p.Name + } + } + return cmb, cenum, nil +} + +// Rename struct +// struct Name { 1 require Mb type} +func (st *StructInfo) Rename() { + st.OriginName = st.Name + st.Name = utils.UpperFirstLetter(st.Name) + for i := range st.Mb { + 
st.Mb[i].OriginKey = st.Mb[i].Key + st.Mb[i].Key = utils.UpperFirstLetter(st.Mb[i].Key) + } +} + +// Rename interface +// interface Name { Fun } +func (itf *InterfaceInfo) Rename() { + itf.OriginName = itf.Name + itf.Name = utils.UpperFirstLetter(itf.Name) + for i := range itf.Fun { + itf.Fun[i].Rename() + } +} + +func (en *EnumInfo) Rename() { + en.OriginName = en.Name + en.Name = utils.UpperFirstLetter(en.Name) + for i := range en.Mb { + en.Mb[i].Key = utils.UpperFirstLetter(en.Mb[i].Key) + } +} + +func (cst *ConstInfo) Rename() { + cst.OriginName = cst.Name + cst.Name = utils.UpperFirstLetter(cst.Name) +} + +// Rename func +// type Fun (arg ArgType), in case keyword and name conflicts,argname need to capitalize. +// Fun (type int32) +func (fun *FunInfo) Rename() { + fun.OriginName = fun.Name + fun.Name = utils.UpperFirstLetter(fun.Name) + for i := range fun.Args { + fun.Args[i].OriginName = fun.Args[i].Name + // func args donot upper firs + //fun.Args[i].Name = utils.UpperFirstLetter(fun.Args[i].Name) + } +} diff --git a/tars/tools/tars2go/gen_go.go b/tars/tools/tars2go/gencode/gen_go.go similarity index 77% rename from tars/tools/tars2go/gen_go.go rename to tars/tools/tars2go/gencode/gen_go.go index f96e2016..d896dc5b 100755 --- a/tars/tools/tars2go/gen_go.go +++ b/tars/tools/tars2go/gencode/gen_go.go @@ -1,8 +1,7 @@ -package main +package gencode import ( "bytes" - "flag" "fmt" "go/format" "io/ioutil" @@ -11,74 +10,40 @@ import ( "runtime" "strconv" "strings" + "sync" + + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/ast" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/options" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/parse" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/token" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/utils" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/version" ) -var gE = flag.Bool("E", false, "Generate code before fmt for troubleshooting") -var gAddServant = flag.Bool("add-servant", true, "Generate AddServant function") -var gModuleCycle = flag.Bool("module-cycle", false, "support jce module cycle include(do not support jce file cycle include)") -var gModuleUpper = flag.Bool("module-upper", false, "native module names are supported, otherwise the system will upper the first letter of the module name") -var gJsonOmitEmpty = flag.Bool("json-omitempty", false, "Generate json omitempty support") -var dispatchReporter = flag.Bool("dispatch-reporter", false, "Dispatch reporter support") -var debug = flag.Bool("debug", false, "enable debug mode") - -var gFileMap map[string]bool - -func init() { - gFileMap = make(map[string]bool) -} +var ( + fileMap sync.Map +) // GenGo record go code information. type GenGo struct { - I []string // imports with path + opt *options.Options code bytes.Buffer vc int // var count. 
Used to generate unique variable names - path string - tarsPath string - module string + filepath string prefix string - p *Parse + module *ast.ModuleInfo // proto file name(not include .tars) ProtoName string } -// NewGenGo build up a new path -func NewGenGo(path string, module string, outdir string) *GenGo { - if outdir != "" { - b := []byte(outdir) - last := b[len(b)-1:] - if string(last) != "/" { - outdir += "/" - } - } - - return &GenGo{path: path, module: module, prefix: outdir, ProtoName: path2ProtoName(path)} -} - -func path2ProtoName(path string) string { - iBegin := strings.LastIndex(path, "/") - if iBegin == -1 || iBegin >= len(path)-1 { - iBegin = 0 - } else { - iBegin++ - } - iEnd := strings.LastIndex(path, ".tars") - if iEnd == -1 { - iEnd = len(path) +// NewGenGo build up a new filepath +func NewGenGo(opt *options.Options, filepath string) *GenGo { + if opt.Outdir != "" && !strings.HasSuffix(opt.Outdir, "/") { + opt.Outdir += "/" } - return path[iBegin:iEnd] -} - -// Initial capitalization -func upperFirstLetter(s string) string { - if len(s) == 0 { - return "" - } - if len(s) == 1 { - return strings.ToUpper(string(s[0])) - } - return strings.ToUpper(string(s[0])) + s[1:] + return &GenGo{opt: opt, filepath: filepath, prefix: opt.Outdir, ProtoName: utils.Path2ProtoName(filepath)} } func getShortTypeName(src string) string { @@ -105,66 +70,6 @@ func genForHead(vc string) string { return ` for ` + i + `,` + e + ` := int32(0), length;` + i + `<` + e + `;` + i + `++ ` } -// === rename area === -// 0. rename module -func (p *Parse) rename() { - p.OriginModule = p.Module - if *gModuleUpper { - p.Module = upperFirstLetter(p.Module) - } -} - -// 1. struct rename -// struct Name { 1 require Mb type} -func (st *StructInfo) rename() { - st.OriginName = st.Name - st.Name = upperFirstLetter(st.Name) - for i := range st.Mb { - st.Mb[i].OriginKey = st.Mb[i].Key - st.Mb[i].Key = upperFirstLetter(st.Mb[i].Key) - } -} - -// 1. interface rename -// interface Name { Fun } -func (itf *InterfaceInfo) rename() { - itf.OriginName = itf.Name - itf.Name = upperFirstLetter(itf.Name) - for i := range itf.Fun { - itf.Fun[i].rename() - } -} - -func (en *EnumInfo) rename() { - en.OriginName = en.Name - en.Name = upperFirstLetter(en.Name) - for i := range en.Mb { - en.Mb[i].Key = upperFirstLetter(en.Mb[i].Key) - } -} - -func (cst *ConstInfo) rename() { - cst.OriginName = cst.Name - cst.Name = upperFirstLetter(cst.Name) -} - -// 2. func rename -// type Fun (arg ArgType), in case keyword and name conflicts,argname need to capitalize. -// Fun (type int32) -func (fun *FunInfo) rename() { - fun.OriginName = fun.Name - fun.Name = upperFirstLetter(fun.Name) - for i := range fun.Args { - fun.Args[i].OriginName = fun.Args[i].Name - // func args donot upper firs - //fun.Args[i].Name = upperFirstLetter(fun.Args[i].Name) - } -} - -// 3. genType rename all Type - -// === rename end === - // Gen to parse file. 
func (gen *GenGo) Gen() { defer func() { @@ -175,38 +80,38 @@ func (gen *GenGo) Gen() { } }() - gen.p = ParseFile(gen.path, make([]string, 0)) + gen.module = parse.NewParse(gen.opt, gen.filepath, make([]string, 0)) gen.genAll() } func (gen *GenGo) genAll() { - if gFileMap[gen.path] { + if _, ok := fileMap.Load(gen.filepath); ok { // already compiled return } - gFileMap[gen.path] = true + fileMap.Store(gen.filepath, struct{}{}) - gen.p.rename() - gen.genInclude(gen.p.IncParse) + gen.module.Rename(gen.opt.ModuleUpper) + gen.genInclude(gen.module.IncModule) gen.code.Reset() gen.genHead() gen.genPackage() - for _, v := range gen.p.Enum { + for _, v := range gen.module.Enum { gen.genEnum(&v) } - gen.genConst(gen.p.Const) + gen.genConst(gen.module.Const) - for _, v := range gen.p.Struct { + for _, v := range gen.module.Struct { gen.genStruct(&v) } - if len(gen.p.Enum) > 0 || len(gen.p.Const) > 0 || len(gen.p.Struct) > 0 { - gen.saveToSourceFile(path2ProtoName(gen.path) + ".go") + if len(gen.module.Enum) > 0 || len(gen.module.Const) > 0 || len(gen.module.Struct) > 0 { + gen.saveToSourceFile(utils.Path2ProtoName(gen.filepath) + ".go") } - for _, v := range gen.p.Interface { + for _, v := range gen.module.Interface { gen.genInterface(&v) } } @@ -220,10 +125,10 @@ func (gen *GenGo) saveToSourceFile(filename string) { var err error prefix := gen.prefix - if !*gE { + if !gen.opt.E { beauty, err = format.Source(gen.code.Bytes()) if err != nil { - if *debug { + if gen.opt.Debug { fmt.Println("------------------") fmt.Println(string(gen.code.Bytes())) fmt.Println("------------------") @@ -238,10 +143,10 @@ func (gen *GenGo) saveToSourceFile(filename string) { fmt.Println(string(beauty)) } else { var mkPath string - if *gModuleCycle == true { - mkPath = prefix + gen.ProtoName + "/" + gen.p.Module + if gen.opt.ModuleCycle { + mkPath = prefix + gen.ProtoName + "/" + gen.module.Name } else { - mkPath = prefix + gen.p.Module + mkPath = prefix + gen.module.Name } err = os.MkdirAll(mkPath, 0766) @@ -264,24 +169,24 @@ func (gen *GenGo) genVariableName(prefix, name string) string { } func (gen *GenGo) genHead() { - gen.code.WriteString(`// Package ` + gen.p.Module + ` comment -// This file was generated by tars2go ` + VERSION + ` -// Generated from ` + filepath.Base(gen.path) + ` + gen.code.WriteString(`// Package ` + gen.module.Name + ` comment +// This file was generated by tars2go ` + version.VERSION + ` +// Generated from ` + filepath.Base(gen.filepath) + ` `) } func (gen *GenGo) genPackage() { - gen.code.WriteString("package " + gen.p.Module + "\n\n") + gen.code.WriteString("package " + gen.module.Name + "\n\n") gen.code.WriteString(` import ( "fmt" `) - gen.code.WriteString(`"` + gen.tarsPath + "/protocol/codec\"\n") + gen.code.WriteString(`"` + gen.opt.TarsPath + "/protocol/codec\"\n") mImports := make(map[string]bool) - for _, st := range gen.p.Struct { - if *gModuleCycle == true { + for _, st := range gen.module.Struct { + if gen.opt.ModuleCycle { for k, v := range st.DependModuleWithJce { gen.genStructImport(k, v, mImports) } @@ -308,7 +213,7 @@ func (gen *GenGo) genStructImport(module string, protoName string, mImports map[ var moduleStr string var jcePath string var moduleAlia string - if *gModuleCycle == true { + if gen.opt.ModuleCycle { moduleStr = module[len(protoName)+1:] jcePath = protoName + "/" moduleAlia = module + " " @@ -316,15 +221,15 @@ func (gen *GenGo) genStructImport(module string, protoName string, mImports map[ moduleStr = module } - for _, p := range gen.I { + for _, p := range 
gen.opt.Imports { if strings.HasSuffix(p, "/"+moduleStr) { mImports[`"`+p+`"`] = true return } } - if *gModuleUpper { - moduleAlia = upperFirstLetter(moduleAlia) + if gen.opt.ModuleUpper { + moduleAlia = utils.UpperFirstLetter(moduleAlia) } // example: @@ -337,8 +242,8 @@ func (gen *GenGo) genStructImport(module string, protoName string, mImports map[ // MyApp // TarsTest/MyApp var modulePath string - if gen.module != "" { - mf := filepath.Clean(filepath.Join(gen.module, gen.prefix)) + if gen.opt.Module != "" { + mf := filepath.Clean(filepath.Join(gen.opt.Module, gen.prefix)) if runtime.GOOS == "windows" { mf = strings.ReplaceAll(mf, string(os.PathSeparator), string('/')) } @@ -349,8 +254,8 @@ func (gen *GenGo) genStructImport(module string, protoName string, mImports map[ mImports[moduleAlia+`"`+modulePath+`"`] = true } -func (gen *GenGo) genIFPackage(itf *InterfaceInfo) { - gen.code.WriteString("package " + gen.p.Module + "\n\n") +func (gen *GenGo) genIFPackage(itf *ast.InterfaceInfo) { + gen.code.WriteString("package " + gen.module.Name + "\n\n") gen.code.WriteString(` import ( "bytes" @@ -359,23 +264,24 @@ import ( "unsafe" "encoding/json" `) - if *gAddServant { - gen.code.WriteString(`"` + gen.tarsPath + "\"\n") + tarsPath := gen.opt.TarsPath + if gen.opt.AddServant { + gen.code.WriteString(`"` + tarsPath + "\"\n") } - gen.code.WriteString(`"` + gen.tarsPath + "/protocol/res/requestf\"\n") - gen.code.WriteString(`m "` + gen.tarsPath + "/model\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/protocol/codec\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/protocol/tup\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/protocol/res/basef\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/util/tools\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/util/endpoint\"\n") - gen.code.WriteString(`"` + gen.tarsPath + "/util/current\"\n") - if !withoutTrace { - gen.code.WriteString("tarstrace \"" + gen.tarsPath + "/util/trace\"\n") + gen.code.WriteString(`"` + tarsPath + "/protocol/res/requestf\"\n") + gen.code.WriteString(`m "` + tarsPath + "/model\"\n") + gen.code.WriteString(`"` + tarsPath + "/protocol/codec\"\n") + gen.code.WriteString(`"` + tarsPath + "/protocol/tup\"\n") + gen.code.WriteString(`"` + tarsPath + "/protocol/res/basef\"\n") + gen.code.WriteString(`"` + tarsPath + "/util/tools\"\n") + gen.code.WriteString(`"` + tarsPath + "/util/endpoint\"\n") + gen.code.WriteString(`"` + tarsPath + "/util/current\"\n") + if !gen.opt.WithoutTrace { + gen.code.WriteString("tarstrace \"" + tarsPath + "/util/trace\"\n") } - if *gModuleCycle == true { + if gen.opt.ModuleCycle == true { for k, v := range itf.DependModuleWithJce { gen.genIFImport(k, v) } @@ -400,22 +306,22 @@ func (gen *GenGo) genIFImport(module string, protoName string) { var moduleStr string var jcePath string var moduleAlia string - if *gModuleCycle == true { + if gen.opt.ModuleCycle { moduleStr = module[len(protoName)+1:] jcePath = protoName + "/" moduleAlia = module + " " } else { moduleStr = module } - for _, p := range gen.I { + for _, p := range gen.opt.Imports { if strings.HasSuffix(p, "/"+moduleStr) { gen.code.WriteString(`"` + p + `"` + "\n") return } } - if *gModuleUpper { - moduleAlia = upperFirstLetter(moduleAlia) + if gen.opt.ModuleUpper { + moduleAlia = utils.UpperFirstLetter(moduleAlia) } // example: @@ -428,8 +334,8 @@ func (gen *GenGo) genIFImport(module string, protoName string) { // MyApp // TarsTest/MyApp var modulePath string - if gen.module != "" { - mf := filepath.Clean(filepath.Join(gen.module, 
gen.prefix)) + if gen.opt.Module != "" { + mf := filepath.Clean(filepath.Join(gen.opt.Module, gen.prefix)) if runtime.GOOS == "windows" { mf = strings.ReplaceAll(mf, string(os.PathSeparator), string('/')) } @@ -440,73 +346,73 @@ func (gen *GenGo) genIFImport(module string, protoName string) { gen.code.WriteString(moduleAlia + `"` + modulePath + `"` + "\n") } -func (gen *GenGo) genType(ty *VarType) string { +func (gen *GenGo) genType(ty *ast.VarType) string { ret := "" switch ty.Type { - case tkTBool: + case token.TBool: ret = "bool" - case tkTInt: + case token.TInt: if ty.Unsigned { ret = "uint32" } else { ret = "int32" } - case tkTShort: + case token.TShort: if ty.Unsigned { ret = "uint16" } else { ret = "int16" } - case tkTByte: + case token.TByte: if ty.Unsigned { ret = "uint8" } else { ret = "int8" } - case tkTLong: + case token.TLong: if ty.Unsigned { ret = "uint64" } else { ret = "int64" } - case tkTFloat: + case token.TFloat: ret = "float32" - case tkTDouble: + case token.TDouble: ret = "float64" - case tkTString: + case token.TString: ret = "string" - case tkTVector: + case token.TVector: ret = "[]" + gen.genType(ty.TypeK) - case tkTMap: + case token.TMap: ret = "map[" + gen.genType(ty.TypeK) + "]" + gen.genType(ty.TypeV) - case tkName: + case token.Name: ret = strings.Replace(ty.TypeSt, "::", ".", -1) vec := strings.Split(ty.TypeSt, "::") for i := range vec { - if *gModuleUpper { - vec[i] = upperFirstLetter(vec[i]) + if gen.opt.ModuleUpper { + vec[i] = utils.UpperFirstLetter(vec[i]) } else { if i == (len(vec) - 1) { - vec[i] = upperFirstLetter(vec[i]) + vec[i] = utils.UpperFirstLetter(vec[i]) } } } ret = strings.Join(vec, ".") - case tkTArray: + case token.TArray: ret = "[" + fmt.Sprintf("%v", ty.TypeL) + "]" + gen.genType(ty.TypeK) default: - gen.genErr("Unknown Type " + TokenMap[ty.Type]) + gen.genErr("Unknown Type " + token.Value(ty.Type)) } return ret } -func (gen *GenGo) genStructDefine(st *StructInfo) { +func (gen *GenGo) genStructDefine(st *ast.StructInfo) { c := &gen.code c.WriteString("// " + st.Name + " struct implement\n") c.WriteString("type " + st.Name + " struct {\n") for _, v := range st.Mb { - if *gJsonOmitEmpty { + if gen.opt.JsonOmitEmpty { c.WriteString("\t" + v.Key + " " + gen.genType(v.Type) + " `json:\"" + v.OriginKey + ",omitempty\"`\n") } else { c.WriteString("\t" + v.Key + " " + gen.genType(v.Type) + " `json:\"" + v.OriginKey + "\"`\n") @@ -515,13 +421,13 @@ func (gen *GenGo) genStructDefine(st *StructInfo) { c.WriteString("}\n") } -func (gen *GenGo) genFunResetDefault(st *StructInfo) { +func (gen *GenGo) genFunResetDefault(st *ast.StructInfo) { c := &gen.code c.WriteString("func (st *" + st.Name + ") ResetDefault() {\n") for _, v := range st.Mb { - if v.Type.CType == tkStruct { + if v.Type.CType == token.Struct { c.WriteString("st." 
+ v.Key + ".ResetDefault()\n") } if v.Default == "" { @@ -532,7 +438,7 @@ func (gen *GenGo) genFunResetDefault(st *StructInfo) { c.WriteString("}\n") } -func (gen *GenGo) genWriteSimpleList(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteSimpleList(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code tag := strconv.Itoa(int(mb.Tag)) unsign := "Int8" @@ -552,11 +458,11 @@ err = buf.WriteSlice` + unsign + `(` + gen.genVariableName(prefix, mb.Key) + `) `) } -func (gen *GenGo) genWriteVector(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteVector(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code // SimpleList - if mb.Type.TypeK.Type == tkTByte && !mb.Type.TypeK.Unsigned { + if mb.Type.TypeK.Type == token.TByte && !mb.Type.TypeK.Unsigned { gen.genWriteSimpleList(mb, prefix, hasRet) return } @@ -573,7 +479,7 @@ for _, v := range ` + gen.genVariableName(prefix, mb.Key) + ` { `) // for _, v := range can nesting for _, v := range,does not conflict, support multidimensional arrays - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = "v" gen.genWriteVar(dummy, "", hasRet) @@ -581,11 +487,11 @@ for _, v := range ` + gen.genVariableName(prefix, mb.Key) + ` { c.WriteString("}\n") } -func (gen *GenGo) genWriteArray(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteArray(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code // SimpleList - if mb.Type.TypeK.Type == tkTByte && !mb.Type.TypeK.Unsigned { + if mb.Type.TypeK.Type == token.TByte && !mb.Type.TypeK.Unsigned { gen.genWriteSimpleList(mb, prefix, hasRet) return } @@ -602,7 +508,7 @@ for _, v := range ` + gen.genVariableName(prefix, mb.Key) + ` { `) // for _, v := range can nesting for _, v := range,does not conflict, support multidimensional arrays - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = "v" gen.genWriteVar(dummy, "", hasRet) @@ -610,7 +516,7 @@ for _, v := range ` + gen.genVariableName(prefix, mb.Key) + ` { c.WriteString("}\n") } -func (gen *GenGo) genWriteStruct(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteStruct(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code tag := strconv.Itoa(int(mb.Tag)) c.WriteString(` @@ -619,7 +525,7 @@ err = ` + prefix + mb.Key + `.WriteBlock(buf, ` + tag + `) `) } -func (gen *GenGo) genWriteMap(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteMap(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code tag := strconv.Itoa(int(mb.Tag)) vc := strconv.Itoa(gen.vc) @@ -634,12 +540,12 @@ for k` + vc + `, v` + vc + ` := range ` + gen.genVariableName(prefix, mb.Key) + `) // for _, v := range can nesting for _, v := range,does not conflict, support multidimensional arrays - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = "k" + vc gen.genWriteVar(dummy, "", hasRet) - dummy = &StructMember{} + dummy = &ast.StructMember{} dummy.Type = mb.Type.TypeV dummy.Key = "v" + vc dummy.Tag = 1 @@ -648,18 +554,18 @@ for k` + vc + `, v` + vc + ` := range ` + gen.genVariableName(prefix, mb.Key) + c.WriteString("}\n") } -func (gen *GenGo) genWriteVar(v *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genWriteVar(v *ast.StructMember, prefix string, hasRet bool) { c := &gen.code switch v.Type.Type { - case tkTVector: + case token.TVector: gen.genWriteVector(v, prefix, hasRet) - case 
tkTArray: + case token.TArray: gen.genWriteArray(v, prefix, hasRet) - case tkTMap: + case token.TMap: gen.genWriteMap(v, prefix, hasRet) - case tkName: - if v.Type.CType == tkEnum { + case token.Name: + if v.Type.CType == token.Enum { // tkEnum enumeration processing tag := strconv.Itoa(int(v.Tag)) c.WriteString(` @@ -672,13 +578,13 @@ err = buf.WriteInt32(int32(` + gen.genVariableName(prefix, v.Key) + `),` + tag + default: tag := strconv.Itoa(int(v.Tag)) c.WriteString(` -err = buf.Write` + upperFirstLetter(gen.genType(v.Type)) + `(` + gen.genVariableName(prefix, v.Key) + `, ` + tag + `) +err = buf.Write` + utils.UpperFirstLetter(gen.genType(v.Type)) + `(` + gen.genVariableName(prefix, v.Key) + `, ` + tag + `) ` + errString(hasRet) + ` `) } } -func (gen *GenGo) genFunWriteBlock(st *StructInfo) { +func (gen *GenGo) genFunWriteBlock(st *ast.StructInfo) { c := &gen.code // WriteBlock function head @@ -704,7 +610,7 @@ func (st *` + st.Name + `) WriteBlock(buf *codec.Buffer, tag byte) error { `) } -func (gen *GenGo) genFunWriteTo(st *StructInfo) { +func (gen *GenGo) genFunWriteTo(st *ast.StructInfo) { c := &gen.code c.WriteString(`// WriteTo encode struct to buffer @@ -720,7 +626,7 @@ func (st *` + st.Name + `) WriteTo(buf *codec.Buffer) (err error) { `) } -func (gen *GenGo) genReadSimpleList(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genReadSimpleList(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code unsign := "Int8" if mb.Type.TypeK.Unsigned { @@ -738,7 +644,7 @@ err = readBuf.ReadSlice` + unsign + `(&` + prefix + mb.Key + `, length, true) `) } -func (gen *GenGo) genReadVector(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genReadVector(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code errStr := errString(hasRet) @@ -770,7 +676,7 @@ if ty == codec.LIST { ` + genForHead(vc) + `{ `) - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = mb.Key + "[i" + vc + "]" gen.genReadVar(dummy, prefix, hasRet) @@ -778,7 +684,7 @@ if ty == codec.LIST { c.WriteString(`} } else if ty == codec.SimpleList { `) - if mb.Type.TypeK.Type == tkTByte { + if mb.Type.TypeK.Type == token.TByte { gen.genReadSimpleList(mb, prefix, hasRet) } else { c.WriteString(`err = fmt.Errorf("not support SimpleList type") @@ -792,7 +698,7 @@ if ty == codec.LIST { `) } -func (gen *GenGo) genReadArray(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genReadArray(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code errStr := errString(hasRet) @@ -824,7 +730,7 @@ if ty == codec.LIST { ` + genForHead(vc) + `{ `) - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = mb.Key + "[i" + vc + "]" gen.genReadVar(dummy, prefix, hasRet) @@ -832,7 +738,7 @@ if ty == codec.LIST { c.WriteString(`} } else if ty == codec.SimpleList { `) - if mb.Type.TypeK.Type == tkTByte { + if mb.Type.TypeK.Type == token.TByte { gen.genReadSimpleList(mb, prefix, hasRet) } else { c.WriteString(`err = fmt.Errorf("not support SimpleList type") @@ -846,7 +752,7 @@ if ty == codec.LIST { `) } -func (gen *GenGo) genReadStruct(mb *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genReadStruct(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code tag := strconv.Itoa(int(mb.Tag)) require := "false" @@ -859,7 +765,7 @@ err = ` + prefix + mb.Key + `.ReadBlock(readBuf, ` + tag + `, ` + require + `) `) } -func (gen *GenGo) genReadMap(mb *StructMember, prefix string, 
hasRet bool) { +func (gen *GenGo) genReadMap(mb *ast.StructMember, prefix string, hasRet bool) { c := &gen.code tag := strconv.Itoa(int(mb.Tag)) errStr := errString(hasRet) @@ -891,12 +797,12 @@ err = readBuf.ReadInt32(&length, 0, true) var v` + vc + ` ` + gen.genType(mb.Type.TypeV) + ` `) - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = mb.Type.TypeK dummy.Key = "k" + vc gen.genReadVar(dummy, "", hasRet) - dummy = &StructMember{} + dummy = &ast.StructMember{} dummy.Type = mb.Type.TypeV dummy.Key = "v" + vc dummy.Tag = 1 @@ -908,18 +814,18 @@ err = readBuf.ReadInt32(&length, 0, true) `) } -func (gen *GenGo) genReadVar(v *StructMember, prefix string, hasRet bool) { +func (gen *GenGo) genReadVar(v *ast.StructMember, prefix string, hasRet bool) { c := &gen.code switch v.Type.Type { - case tkTVector: + case token.TVector: gen.genReadVector(v, prefix, hasRet) - case tkTArray: + case token.TArray: gen.genReadArray(v, prefix, hasRet) - case tkTMap: + case token.TMap: gen.genReadMap(v, prefix, hasRet) - case tkName: - if v.Type.CType == tkEnum { + case token.Name: + if v.Type.CType == token.Enum { require := "false" if v.Require { require = "true" @@ -939,13 +845,13 @@ err = readBuf.ReadInt32((*int32)(&` + prefix + v.Key + `),` + tag + `, ` + requi } tag := strconv.Itoa(int(v.Tag)) c.WriteString(` -err = readBuf.Read` + upperFirstLetter(gen.genType(v.Type)) + `(&` + prefix + v.Key + `, ` + tag + `, ` + require + `) +err = readBuf.Read` + utils.UpperFirstLetter(gen.genType(v.Type)) + `(&` + prefix + v.Key + `, ` + tag + `, ` + require + `) ` + errString(hasRet) + ` `) } } -func (gen *GenGo) genFunReadFrom(st *StructInfo) { +func (gen *GenGo) genFunReadFrom(st *ast.StructInfo) { c := &gen.code c.WriteString(`// ReadFrom reads from readBuf and put into struct. @@ -974,7 +880,7 @@ func (st *` + st.Name + `) ReadFrom(readBuf *codec.Reader) error { `) } -func (gen *GenGo) genFunReadBlock(st *StructInfo) { +func (gen *GenGo) genFunReadBlock(st *ast.StructInfo) { c := &gen.code c.WriteString(`// ReadBlock reads struct from the given tag , require or optional. 
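The gen_go.go hunks above and below take their token kinds from the new tars/tools/tars2go/token package, whose 143-line diff falls outside this excerpt. The sketch below reconstructs what that package plausibly contains from the call sites visible in this patch (token.Type, token.Value, token.SemInfo, token.Token, the DummyKeywordBegin/End and DummyTypeBegin/End range markers, and named kinds such as token.Struct, token.Enum and token.TByte); identifiers, ordering and the underlying integer type are inferred rather than copied from the committed file.

package token

// EOF is the byte-stream terminator read by the lexer (the old EOS constant).
const EOF = 0

// Type identifies a lexical token kind (the old TK byte type).
type Type byte

// Token kinds referenced in this excerpt; the grouping between the Dummy*
// range markers mirrors the old lex.go constant block.
const (
	Eof Type = iota
	BraceLeft    // {
	BraceRight   // }
	Semi         // ;
	Eq           // =
	Shl          // <
	Shr          // >
	Comma        // ,
	Ptl          // (
	Ptr          // )
	SquareLeft   // [
	SquarerRight // ]
	Include      // #include

	DummyKeywordBegin
	Module
	Enum
	Struct
	Interface
	Require
	Optional
	Const
	Unsigned
	Void
	Out
	Key
	True
	False
	DummyKeywordEnd

	DummyTypeBegin
	TInt
	TBool
	TShort
	TByte
	TLong
	TFloat
	TDouble
	TString
	TVector
	TMap
	TArray
	DummyTypeEnd

	Name    // identifier
	String  // string literal
	Integer // integer literal
	Float   // float literal
)

// SemInfo carries the semantic value attached to a token.
type SemInfo struct {
	I int64
	F float64
	S string
}

// Token is one lexical token together with its source line.
type Token struct {
	T    Type
	S    *SemInfo
	Line int
}

// values maps token kinds back to their spelling, replacing the old TokenMap array.
var values = map[Type]string{
	BraceLeft: "{", BraceRight: "}", Semi: ";", Eq: "=", Shl: "<", Shr: ">",
	Comma: ",", Ptl: "(", Ptr: ")", SquareLeft: "[", SquarerRight: "]",
	Include: "#include",
	Module: "module", Enum: "enum", Struct: "struct", Interface: "interface",
	Require: "require", Optional: "optional", Const: "const", Unsigned: "unsigned",
	Void: "void", Out: "out", Key: "key", True: "true", False: "false",
	TInt: "int", TBool: "bool", TShort: "short", TByte: "byte", TLong: "long",
	TFloat: "float", TDouble: "double", TString: "string",
	TVector: "vector", TMap: "map", TArray: "array",
}

// Value returns the literal spelling of a kind; value-carrying kinds return "".
func Value(t Type) string { return values[t] }

With kinds and spellings centralized in one package, the keyword lookup loop in the lexer's readIdent and error paths such as token.Value(ty.Type) in gen_go.go no longer depend on the package-level TokenMap array that this commit deletes from lex.go.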
@@ -1011,9 +917,9 @@ func (st *` + st.Name + `) ReadBlock(readBuf *codec.Reader, tag byte, require bo `) } -func (gen *GenGo) genStruct(st *StructInfo) { +func (gen *GenGo) genStruct(st *ast.StructInfo) { gen.vc = 0 - st.rename() + st.Rename() gen.genStructDefine(st) gen.genFunResetDefault(st) @@ -1025,16 +931,16 @@ func (gen *GenGo) genStruct(st *StructInfo) { gen.genFunWriteBlock(st) } -func (gen *GenGo) makeEnumName(en *EnumInfo, mb *EnumMember) string { - return upperFirstLetter(en.Name) + "_" + upperFirstLetter(mb.Key) +func (gen *GenGo) makeEnumName(en *ast.EnumInfo, mb *ast.EnumMember) string { + return utils.UpperFirstLetter(en.Name) + "_" + utils.UpperFirstLetter(mb.Key) } -func (gen *GenGo) genEnum(en *EnumInfo) { +func (gen *GenGo) genEnum(en *ast.EnumInfo) { if len(en.Mb) == 0 { return } - en.rename() + en.Rename() c := &gen.code c.WriteString("type " + en.Name + " int32\n") @@ -1073,7 +979,7 @@ func (gen *GenGo) genEnum(en *EnumInfo) { c.WriteString(")\n") } -func (gen *GenGo) genConst(cst []ConstInfo) { +func (gen *GenGo) genConst(cst []ast.ConstInfo) { if len(cst) == 0 { return } @@ -1082,31 +988,25 @@ func (gen *GenGo) genConst(cst []ConstInfo) { c.WriteString("//const as define in tars file\n") c.WriteString("const (\n") - for _, v := range gen.p.Const { - v.rename() + for _, v := range gen.module.Const { + v.Rename() c.WriteString(v.Name + " " + gen.genType(v.Type) + " = " + v.Value + "\n") } c.WriteString(")\n") } -func (gen *GenGo) genInclude(ps []*Parse) { - for _, v := range ps { - gen2 := &GenGo{ - path: v.Source, - module: gen.module, - prefix: gen.prefix, - tarsPath: gTarsPath, - ProtoName: path2ProtoName(v.Source), - } - gen2.p = v - gen2.genAll() +func (gen *GenGo) genInclude(modules []*ast.ModuleInfo) { + for _, module := range modules { + genModule := NewGenGo(gen.opt, module.Name+module.Source) + genModule.module = module + genModule.genAll() } } -func (gen *GenGo) genInterface(itf *InterfaceInfo) { +func (gen *GenGo) genInterface(itf *ast.InterfaceInfo) { gen.code.Reset() - itf.rename() + itf.Rename() gen.genHead() gen.genIFPackage(itf) @@ -1121,7 +1021,7 @@ func (gen *GenGo) genInterface(itf *InterfaceInfo) { gen.saveToSourceFile(itf.Name + ".tars.go") } -func (gen *GenGo) genIFProxy(itf *InterfaceInfo) { +func (gen *GenGo) genIFProxy(itf *ast.InterfaceInfo) { c := &gen.code c.WriteString("// " + itf.Name + " struct\n") c.WriteString("type " + itf.Name + ` struct { @@ -1152,7 +1052,7 @@ func (obj *` + itf.Name + `) Endpoints() []*endpoint.Endpoint { } `) - if *gAddServant { + if gen.opt.AddServant { c.WriteString(`// AddServant adds servant for the service. 
func (obj *` + itf.Name + `) AddServant(imp ` + itf.Name + `Servant, servantObj string) { tars.AddServant(obj, imp, servantObj) @@ -1172,7 +1072,7 @@ func (obj *` + itf.Name + `) AddServantWithContext(imp ` + itf.Name + `ServantWi } } -func (gen *GenGo) genIFProxyFun(interfName string, fun *FunInfo, withContext bool, isOneWay bool) { +func (gen *GenGo) genIFProxyFun(interfName string, fun *ast.FunInfo, withContext bool, isOneWay bool) { c := &gen.code if withContext { if isOneWay { @@ -1227,7 +1127,7 @@ func (gen *GenGo) genIFProxyFun(interfName string, fun *FunInfo, withContext boo if v.IsOut { isOut = true } - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = v.Name dummy.Tag = int32(k + 1) @@ -1241,7 +1141,7 @@ func (gen *GenGo) genIFProxyFun(interfName string, fun *FunInfo, withContext boo errStr := errString(fun.HasRet) // trace - if !isOneWay && !withoutTrace { + if !isOneWay && !gen.opt.WithoutTrace { c.WriteString(` trace, ok := current.GetTarsTrace(tarsCtx) if ok && trace.Call() { @@ -1294,7 +1194,7 @@ tarsResp := new(requestf.ResponsePacket)`) c.WriteString("readBuf := codec.NewReader(tools.Int8ToByte(tarsResp.SBuffer))") } if fun.HasRet && !isOneWay { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = fun.RetType dummy.Key = "ret" dummy.Tag = 0 @@ -1305,7 +1205,7 @@ tarsResp := new(requestf.ResponsePacket)`) if !isOneWay { for k, v := range fun.Args { if v.IsOut { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = "(*" + v.Name + ")" dummy.Tag = int32(k + 1) @@ -1313,7 +1213,7 @@ tarsResp := new(requestf.ResponsePacket)`) gen.genReadVar(dummy, "", fun.HasRet) } } - if withContext && !withoutTrace { + if withContext && !gen.opt.WithoutTrace { traceParamFlag := "traceParamFlag := trace.NeedTraceParam(tarstrace.EstCR, uint(0))" if isOut || fun.HasRet { traceParamFlag = "traceParamFlag := trace.NeedTraceParam(tarstrace.EstCR, uint(readBuf.Len()))" @@ -1383,17 +1283,17 @@ if ok && trace.Call() { c.WriteString("}\n") } -func (gen *GenGo) genArgs(arg *ArgInfo) { +func (gen *GenGo) genArgs(arg *ast.ArgInfo) { c := &gen.code c.WriteString(arg.Name + " ") - if arg.IsOut || arg.Type.CType == tkStruct { + if arg.IsOut || arg.Type.CType == token.Struct { c.WriteString("*") } c.WriteString(gen.genType(arg.Type) + ",") } -func (gen *GenGo) genIFServer(itf *InterfaceInfo) { +func (gen *GenGo) genIFServer(itf *ast.InterfaceInfo) { c := &gen.code c.WriteString("type " + itf.Name + "Servant interface {\n") for _, v := range itf.Fun { @@ -1402,7 +1302,7 @@ func (gen *GenGo) genIFServer(itf *InterfaceInfo) { c.WriteString("}\n") } -func (gen *GenGo) genIFServerWithContext(itf *InterfaceInfo) { +func (gen *GenGo) genIFServerWithContext(itf *ast.InterfaceInfo) { c := &gen.code c.WriteString("type " + itf.Name + "ServantWithContext interface {\n") for _, v := range itf.Fun { @@ -1411,7 +1311,7 @@ func (gen *GenGo) genIFServerWithContext(itf *InterfaceInfo) { c.WriteString("} \n") } -func (gen *GenGo) genIFServerFun(fun *FunInfo) { +func (gen *GenGo) genIFServerFun(fun *ast.FunInfo) { c := &gen.code c.WriteString(fun.Name + "(") for _, v := range fun.Args { @@ -1425,7 +1325,7 @@ func (gen *GenGo) genIFServerFun(fun *FunInfo) { c.WriteString("err error)\n") } -func (gen *GenGo) genIFServerFunWithContext(fun *FunInfo) { +func (gen *GenGo) genIFServerFunWithContext(fun *ast.FunInfo) { c := &gen.code c.WriteString(fun.Name + "(tarsCtx context.Context, ") for _, v := range fun.Args { @@ -1439,7 +1339,7 @@ func 
(gen *GenGo) genIFServerFunWithContext(fun *FunInfo) { c.WriteString("err error)\n") } -func (gen *GenGo) genIFDispatch(itf *InterfaceInfo) { +func (gen *GenGo) genIFDispatch(itf *ast.InterfaceInfo) { c := &gen.code c.WriteString("// Dispatch is used to call the server side implement for the method defined in the tars file. withContext shows using context or not. \n") c.WriteString("func(obj *" + itf.Name + `) Dispatch(tarsCtx context.Context, val interface{}, tarsReq *requestf.RequestPacket, tarsResp *requestf.ResponsePacket, withContext bool) (err error) { @@ -1506,7 +1406,7 @@ func (gen *GenGo) genIFDispatch(itf *InterfaceInfo) { `) } -func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { +func (gen *GenGo) genSwitchCase(tname string, fun *ast.FunInfo) { c := &gen.code c.WriteString(`case "` + fun.OriginName + `":` + "\n") @@ -1514,9 +1414,9 @@ func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { outArgsCount := 0 for _, v := range fun.Args { c.WriteString("var " + v.Name + " " + gen.genType(v.Type) + "\n") - if v.Type.Type == tkTMap { + if v.Type.Type == token.TMap { c.WriteString(v.Name + " = make(" + gen.genType(v.Type) + ")\n") - } else if v.Type.Type == tkTVector { + } else if v.Type.Type == token.TVector { c.WriteString(v.Name + " = make(" + gen.genType(v.Type) + ", 0)\n") } if v.IsOut { @@ -1536,7 +1436,7 @@ func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { for k, v := range fun.Args { if !v.IsOut { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = v.Name dummy.Tag = int32(k + 1) @@ -1558,7 +1458,7 @@ func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { c.WriteString(`reqTup.GetBuffer("` + v.Name + `", &tupBuffer)` + "\n") c.WriteString("readBuf.Reset(tupBuffer)") - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = v.Name dummy.Tag = 0 @@ -1581,7 +1481,7 @@ func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { if !v.IsOut { c.WriteString("{\n") c.WriteString(`jsonStr, _ := json.Marshal(jsonData["` + v.Name + `"])` + "\n") - if v.Type.CType == tkStruct { + if v.Type.CType == token.Struct { c.WriteString(v.Name + ".ResetDefault()\n") } c.WriteString("if err = json.Unmarshal(jsonStr, &" + v.Name + "); err != nil {") @@ -1601,7 +1501,7 @@ func (gen *GenGo) genSwitchCase(tname string, fun *FunInfo) { c.WriteString("\n\n") } - if !withoutTrace { + if !gen.opt.WithoutTrace { c.WriteString(` trace, ok := current.GetTarsTrace(tarsCtx) if ok && trace.Call() { @@ -1634,7 +1534,7 @@ if ok && trace.Call() { imp := val.(` + tname + `Servant) funRet, err = imp.` + fun.Name + `(`) for _, v := range fun.Args { - if v.IsOut || v.Type.CType == tkStruct { + if v.IsOut || v.Type.CType == token.Struct { c.WriteString("&" + v.Name + ",") } else { c.WriteString(v.Name + ",") @@ -1647,7 +1547,7 @@ if ok && trace.Call() { imp := val.(` + tname + `ServantWithContext) funRet, err = imp.` + fun.Name + `(tarsCtx ,`) for _, v := range fun.Args { - if v.IsOut || v.Type.CType == tkStruct { + if v.IsOut || v.Type.CType == token.Struct { c.WriteString("&" + v.Name + ",") } else { c.WriteString(v.Name + ",") @@ -1660,7 +1560,7 @@ if ok && trace.Call() { imp := val.(` + tname + `Servant) err = imp.` + fun.Name + `(`) for _, v := range fun.Args { - if v.IsOut || v.Type.CType == tkStruct { + if v.IsOut || v.Type.CType == token.Struct { c.WriteString("&" + v.Name + ",") } else { c.WriteString(v.Name + ",") @@ -1673,7 +1573,7 @@ if ok && trace.Call() { imp := val.(` + tname + `ServantWithContext) 
err = imp.` + fun.Name + `(tarsCtx ,`) for _, v := range fun.Args { - if v.IsOut || v.Type.CType == tkStruct { + if v.IsOut || v.Type.CType == token.Struct { c.WriteString("&" + v.Name + ",") } else { c.WriteString(v.Name + ",") @@ -1682,7 +1582,7 @@ if ok && trace.Call() { c.WriteString(") \n}\n") } - if *dispatchReporter { + if gen.opt.DispatchReporter { var inArgStr, outArgStr, retArgStr string if fun.HasRet { retArgStr = "funRet, err" @@ -1691,7 +1591,7 @@ if ok && trace.Call() { } for _, v := range fun.Args { prefix := "" - if v.Type.CType == tkStruct { + if v.Type.CType == token.Struct { prefix = "&" } if v.IsOut { @@ -1717,7 +1617,7 @@ if ok && trace.Call() { `) if fun.HasRet { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = fun.RetType dummy.Key = "funRet" dummy.Tag = 0 @@ -1727,7 +1627,7 @@ if ok && trace.Call() { for k, v := range fun.Args { if v.IsOut { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = v.Name dummy.Tag = int32(k + 1) @@ -1741,7 +1641,7 @@ if ok && trace.Call() { rspTup := tup.NewUniAttribute() `) if fun.HasRet { - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = fun.RetType dummy.Key = "funRet" dummy.Tag = 0 @@ -1758,7 +1658,7 @@ rspTup := tup.NewUniAttribute() if v.IsOut { c.WriteString(` buf.Reset()`) - dummy := &StructMember{} + dummy := &ast.StructMember{} dummy.Type = v.Type dummy.Key = v.Name dummy.Tag = 0 @@ -1802,7 +1702,7 @@ rspTup := tup.NewUniAttribute() }`) c.WriteString("\n") - if !withoutTrace { + if !gen.opt.WithoutTrace { c.WriteString(` if ok && trace.Call() { var traceParam string diff --git a/tars/tools/tars2go/lex.go b/tars/tools/tars2go/lexer/lexer.go old mode 100755 new mode 100644 similarity index 54% rename from tars/tools/tars2go/lex.go rename to tars/tools/tars2go/lexer/lexer.go index ffa5a1bd..f5032cb0 --- a/tars/tools/tars2go/lex.go +++ b/tars/tools/tars2go/lexer/lexer.go @@ -1,145 +1,18 @@ -package main +package lexer import ( "bytes" "strconv" "strings" -) - -// EOS is byte stream terminator -const EOS = 0 - -// TK is a byte type. -type TK byte - -const ( - tkEos TK = iota - tkBraceLeft //({) - tkBraceRight //} - tkSemi //; - tkEq //= - tkShl //< - tkShr //> - tkComma //, - tkPtl //( - tkPtr //) - tkSquareLeft //[ - tkSquarerRight //] - tkInclude //#include - - tkDummyKeywordBegin - // keyword - tkModule - tkEnum - tkStruct - tkInterface - tkRequire - tkOptional - tkConst - tkUnsigned - tkVoid - tkOut - tkKey - tkTrue - tkFalse - tkDummyKeywordEnd - - tkDummyTypeBegin - // type - tkTInt - tkTBool - tkTShort - tkTByte - tkTLong - tkTFloat - tkTDouble - tkTString - tkTVector - tkTMap - tkTArray - tkDummyTypeEnd - tkName // variable name - // value - tkString - tkInteger - tkFloat + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/token" ) -// TokenMap record token value. 
-var TokenMap = [...]string{ - tkEos: "", - - tkBraceLeft: "{", - tkBraceRight: "}", - tkSemi: ";", - tkEq: "=", - tkShl: "<", - tkShr: ">", - tkComma: ",", - tkPtl: "(", - tkPtr: ")", - tkSquareLeft: "[", - tkSquarerRight: "]", - tkInclude: "#include", - - // keyword - tkModule: "module", - tkEnum: "enum", - tkStruct: "struct", - tkInterface: "interface", - tkRequire: "require", - tkOptional: "optional", - tkConst: "const", - tkUnsigned: "unsigned", - tkVoid: "void", - tkOut: "out", - tkKey: "key", - tkTrue: "true", - tkFalse: "false", - - // type - tkTInt: "int", - tkTBool: "bool", - tkTShort: "short", - tkTByte: "byte", - tkTLong: "long", - tkTFloat: "float", - tkTDouble: "double", - tkTString: "string", - tkTVector: "vector", - tkTMap: "map", - tkTArray: "array", - - tkName: "", - // value - tkString: "", - tkInteger: "", - tkFloat: "", -} - -// SemInfo is struct. -type SemInfo struct { - I int64 - F float64 - S string -} - -// Token record token information. -type Token struct { - T TK - S *SemInfo - Line int -} - // LexState record lexical state. type LexState struct { current byte lineNumber int - //t Token - //lookahead Token - tokenBuff bytes.Buffer buff *bytes.Buffer @@ -162,19 +35,6 @@ func isLetter(b byte) bool { return (b >= 'a' && b <= 'z') || (b >= 'A' && b <= 'Z') || b == '_' } -func isType(t TK) bool { - return t > tkDummyTypeBegin && t < tkDummyTypeEnd -} - -func isNumberType(t TK) bool { - switch t { - case tkTInt, tkTBool, tkTShort, tkTByte, tkTLong, tkTFloat, tkTDouble: - return true - default: - return false - } -} - func (ls *LexState) lexErr(err string) { line := strconv.Itoa(ls.lineNumber) panic(ls.source + ": " + line + ". " + err) @@ -189,10 +49,10 @@ func (ls *LexState) incLine() { ls.lineNumber++ } -func (ls *LexState) readNumber() (TK, *SemInfo) { +func (ls *LexState) readNumber() (token.Type, *token.SemInfo) { hasDot := false isHex := false - sem := &SemInfo{} + sem := &token.SemInfo{} for isNumber(ls.current) || ls.current == '.' 
|| ls.current == 'x' || ls.current == 'X' || (isHex && isHexNumber(ls.current)) { @@ -211,18 +71,18 @@ func (ls *LexState) readNumber() (TK, *SemInfo) { ls.lexErr(err.Error()) } sem.F = f - return tkFloat, sem + return token.Float, sem } i, err := strconv.ParseInt(sem.S, 0, 64) if err != nil { ls.lexErr(err.Error()) } sem.I = i - return tkInteger, sem + return token.Integer, sem } -func (ls *LexState) readIdent() (TK, *SemInfo) { - sem := &SemInfo{} +func (ls *LexState) readIdent() (token.Type, *token.SemInfo) { + sem := &token.SemInfo{} var last byte // :: Point number processing namespace @@ -244,21 +104,21 @@ func (ls *LexState) readIdent() (TK, *SemInfo) { } } - for i := tkDummyKeywordBegin + 1; i < tkDummyKeywordEnd; i++ { - if TokenMap[i] == sem.S { + for i := token.DummyKeywordBegin + 1; i < token.DummyKeywordEnd; i++ { + if token.Value(i) == sem.S { return i, nil } } - for i := tkDummyTypeBegin + 1; i < tkDummyTypeEnd; i++ { - if TokenMap[i] == sem.S { + for i := token.DummyTypeBegin + 1; i < token.DummyTypeEnd; i++ { + if token.Value(i) == sem.S { return i, nil } } - return tkName, sem + return token.Name, sem } -func (ls *LexState) readSharp() (TK, *SemInfo) { +func (ls *LexState) readSharp() (token.Type, *token.SemInfo) { ls.next() for isLetter(ls.current) { ls.tokenBuff.WriteByte(ls.current) @@ -268,15 +128,14 @@ func (ls *LexState) readSharp() (TK, *SemInfo) { ls.lexErr("not #include") } - return tkInclude, nil + return token.Include, nil } -func (ls *LexState) readString() (TK, *SemInfo) { - - sem := &SemInfo{} +func (ls *LexState) readString() (token.Type, *token.SemInfo) { + sem := &token.SemInfo{} ls.next() for { - if ls.current == EOS { + if ls.current == token.EOF { ls.lexErr(`no match "`) } else if ls.current == '"' { ls.next() @@ -288,20 +147,20 @@ func (ls *LexState) readString() (TK, *SemInfo) { } sem.S = ls.tokenBuff.String() - return tkString, sem + return token.String, sem } func (ls *LexState) readLongComment() { for { switch ls.current { - case EOS: + case token.EOF: ls.lexErr("respect */") return case '\n', '\r': ls.incLine() case '*': ls.next() - if ls.current == EOS { + if ls.current == token.EOF { return } else if ls.current == '/' { ls.next() @@ -317,11 +176,11 @@ func (ls *LexState) next() { var err error ls.current, err = ls.buff.ReadByte() if err != nil { - ls.current = EOS + ls.current = token.EOF } } -func (ls *LexState) llexDefault() (TK, *SemInfo) { +func (ls *LexState) llexDefault() (token.Type, *token.SemInfo) { switch { case isNumber(ls.current): return ls.readNumber() @@ -334,12 +193,12 @@ func (ls *LexState) llexDefault() (TK, *SemInfo) { } // Do lexical analysis. 
-func (ls *LexState) llex() (TK, *SemInfo) { +func (ls *LexState) lLex() (token.Type, *token.SemInfo) { for { ls.tokenBuff.Reset() switch ls.current { - case EOS: - return tkEos, nil + case token.EOF: + return token.Eof, nil case ' ', '\t', '\f', '\v': ls.next() case '\n', '\r': @@ -347,7 +206,7 @@ func (ls *LexState) llex() (TK, *SemInfo) { case '/': // Comment processing ls.next() if ls.current == '/' { - for !isNewLine(ls.current) && ls.current != EOS { + for !isNewLine(ls.current) && ls.current != token.EOF { ls.next() } } else if ls.current == '*' { @@ -358,37 +217,37 @@ func (ls *LexState) llex() (TK, *SemInfo) { } case '{': ls.next() - return tkBraceLeft, nil + return token.BraceLeft, nil case '}': ls.next() - return tkBraceRight, nil + return token.BraceRight, nil case ';': ls.next() - return tkSemi, nil + return token.Semi, nil case '=': ls.next() - return tkEq, nil + return token.Eq, nil case '<': ls.next() - return tkShl, nil + return token.Shl, nil case '>': ls.next() - return tkShr, nil + return token.Shr, nil case ',': ls.next() - return tkComma, nil + return token.Comma, nil case '(': ls.next() - return tkPtl, nil + return token.Ptl, nil case ')': ls.next() - return tkPtr, nil + return token.Ptr, nil case '[': ls.next() - return tkSquareLeft, nil + return token.SquareLeft, nil case ']': ls.next() - return tkSquarerRight, nil + return token.SquarerRight, nil case '"': return ls.readString() case '#': @@ -401,9 +260,9 @@ func (ls *LexState) llex() (TK, *SemInfo) { } // NextToken return token after lexical analysis. -func (ls *LexState) NextToken() *Token { - tk := &Token{} - tk.T, tk.S = ls.llex() +func (ls *LexState) NextToken() *token.Token { + tk := &token.Token{} + tk.T, tk.S = ls.lLex() tk.Line = ls.lineNumber return tk } diff --git a/tars/tools/tars2go/main.go b/tars/tools/tars2go/main.go index f77d486e..2486950c 100755 --- a/tars/tools/tars2go/main.go +++ b/tars/tools/tars2go/main.go @@ -6,66 +6,21 @@ package main import ( "flag" - "fmt" "os" - "strings" -) - -type importPath []string - -func (t *importPath) String() string { - return strings.Join(*t, ":") -} - -func (t *importPath) Set(value string) error { - *t = append(*t, value) - return nil -} -var ( - gImports importPath - gTarsPath string - gOutdir string - gModule string - gInclude string - includes []string - - withoutTrace bool + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/gencode" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/options" ) -func printhelp() { - bin := os.Args[0] - if i := strings.LastIndex(bin, "/"); i != -1 { - bin = bin[i+1:] - } - fmt.Printf("Usage: %s [flags] *.tars\n", bin) - fmt.Printf(" %s -I tars/protocol/res/endpoint [-I ...] 
QueryF.tars\n", bin) - fmt.Printf(" %s -include=\"dir1;dir2;dir3\"\n", bin) - flag.PrintDefaults() -} - func main() { - flag.Usage = printhelp - flag.Var(&gImports, "I", "Specify a specific import path") - flag.StringVar(&gTarsPath, "tarsPath", "github.com/TarsCloud/TarsGo/tars", "Specify the tars source path.") - flag.StringVar(&gOutdir, "outdir", "", "which dir to put generated code") - flag.StringVar(&gModule, "module", "", "current go module path") - flag.StringVar(&gInclude, "include", "", "set search path of tars protocol") - flag.BoolVar(&withoutTrace, "without-trace", false, "不需要调用链追踪逻辑") - flag.Parse() - + opt := options.NewOptions() if flag.NArg() == 0 { - printhelp() + opt.PrintHelp() os.Exit(0) } - includes = strings.FieldsFunc(gInclude, func(r rune) bool { - return r == ';' || r == ',' || r == ':' || r == ' ' - }) for _, filename := range flag.Args() { - gen := NewGenGo(filename, gModule, gOutdir) - gen.I = gImports - gen.tarsPath = gTarsPath + gen := gencode.NewGenGo(opt, filename) gen.Gen() } } diff --git a/tars/tools/tars2go/options/options.go b/tars/tools/tars2go/options/options.go new file mode 100644 index 00000000..b871df25 --- /dev/null +++ b/tars/tools/tars2go/options/options.go @@ -0,0 +1,79 @@ +package options + +import ( + "flag" + "fmt" + "os" + "strings" +) + +type ImportPath []string + +type Options struct { + Imports ImportPath + TarsPath string + Outdir string + Module string + Include string + Includes []string + + WithoutTrace bool + // gen + E bool + AddServant bool + ModuleCycle bool + ModuleUpper bool + JsonOmitEmpty bool + DispatchReporter bool + Debug bool +} + +func NewOptions() *Options { + o := &Options{} + o.initFlags() + return o +} + +func (o *Options) initFlags() { + flag.Usage = o.PrintHelp + flag.Var(&o.Imports, "I", "Specify a specific import path") + flag.StringVar(&o.TarsPath, "tarsPath", "github.com/TarsCloud/TarsGo/tars", "Specify the tars source path.") + flag.StringVar(&o.Outdir, "outdir", "", "which dir to put generated code") + flag.StringVar(&o.Module, "module", "", "current go module path") + flag.StringVar(&o.Include, "include", "", "set search path of tars protocol") + flag.BoolVar(&o.WithoutTrace, "without-trace", false, "no call chain tracking logic required") + + // gen options + flag.BoolVar(&o.E, "E", false, "Generate code before fmt for troubleshooting") + flag.BoolVar(&o.AddServant, "add-servant", true, "Generate AddServant function") + flag.BoolVar(&o.ModuleCycle, "module-cycle", false, "support jce module cycle include(do not support jce file cycle include)") + flag.BoolVar(&o.ModuleUpper, "module-upper", false, "native module names are supported, otherwise the system will upper the first letter of the module name") + flag.BoolVar(&o.JsonOmitEmpty, "json-omitempty", false, "Generate json omitempty support") + flag.BoolVar(&o.DispatchReporter, "dispatch-reporter", false, "Dispatch reporter support") + flag.BoolVar(&o.Debug, "debug", false, "enable debug mode") + flag.Parse() + + o.Includes = strings.FieldsFunc(o.Include, func(r rune) bool { + return r == ';' || r == ',' || r == ':' || r == ' ' + }) +} + +func (o *Options) PrintHelp() { + bin := os.Args[0] + if i := strings.LastIndex(bin, "/"); i != -1 { + bin = bin[i+1:] + } + fmt.Printf("Usage: %s [flags] *.tars\n", bin) + fmt.Printf(" %s -I tars/protocol/res/endpoint [-I ...] 
QueryF.tars\n", bin) + fmt.Printf(" %s -include=\"dir1;dir2;dir3\"\n", bin) + flag.PrintDefaults() +} + +func (ip *ImportPath) String() string { + return strings.Join(*ip, ":") +} + +func (ip *ImportPath) Set(value string) error { + *ip = append(*ip, value) + return nil +} diff --git a/tars/tools/tars2go/parse.go b/tars/tools/tars2go/parse.go deleted file mode 100755 index 1f62bae8..00000000 --- a/tars/tools/tars2go/parse.go +++ /dev/null @@ -1,865 +0,0 @@ -package main - -import ( - "fmt" - "io/ioutil" - "os" - "path" - "sort" - "strconv" - "strings" -) - -// VarType contains variable type(token) -type VarType struct { - Type TK // basic type - Unsigned bool // whether unsigned - TypeSt string // custom type name, such as an enumerated struct,at this time Type=tkName - CType TK // make sure which type of custom type is,tkEnum, tkStruct - TypeK *VarType // vector's member variable,the key of map - TypeV *VarType // the value of map - TypeL int64 // length of array -} - -// StructMember member struct. -type StructMember struct { - Tag int32 - Require bool - Type *VarType - Key string // after the uppercase converted key - OriginKey string // original key - Default string - DefType TK -} - -// StructMemberSorter When serializing, make sure the tags are ordered. -type StructMemberSorter []StructMember - -func (a StructMemberSorter) Len() int { return len(a) } -func (a StructMemberSorter) Swap(i, j int) { a[i], a[j] = a[j], a[i] } -func (a StructMemberSorter) Less(i, j int) bool { return a[i].Tag < a[j].Tag } - -// StructInfo record struct information. -type StructInfo struct { - Name string - OriginName string //original name - Mb []StructMember - DependModule map[string]bool - DependModuleWithJce map[string]string -} - -// ArgInfo record argument information. -type ArgInfo struct { - Name string - OriginName string //original name - IsOut bool - Type *VarType -} - -//FunInfo record function information. -type FunInfo struct { - Name string // after the uppercase converted name - OriginName string // original name - HasRet bool - RetType *VarType - Args []ArgInfo -} - -// InterfaceInfo record interface information. -type InterfaceInfo struct { - Name string - OriginName string // original name - Fun []FunInfo - DependModule map[string]bool - DependModuleWithJce map[string]string -} - -// EnumMember record member information. -type EnumMember struct { - Key string - Type int - Value int32 //type 0 - Name string //type 1 -} - -// EnumInfo record EnumMember information include name. -type EnumInfo struct { - Module string - Name string - OriginName string // original name - Mb []EnumMember -} - -// ConstInfo record const information. -type ConstInfo struct { - Type *VarType - Name string - OriginName string // original name - Value string -} - -// HashKeyInfo record hash key information. -type HashKeyInfo struct { - Name string - Member []string -} - -// Parse record information of parse file. 
-type Parse struct { - Source string - - Module string - OriginModule string - Include []string - - Struct []StructInfo - Interface []InterfaceInfo - Enum []EnumInfo - Const []ConstInfo - HashKey []HashKeyInfo - - // have parsed include file - IncParse []*Parse - - lex *LexState - t *Token - lastT *Token - - // jce include chain - IncChain []string - - // proto file name(not include .tars) - ProtoName string - - DependModuleWithJce map[string]bool - fileNames map[string]bool -} - -func (p *Parse) parseErr(err string) { - line := "0" - if p.t != nil { - line = strconv.Itoa(p.t.Line) - } - - panic(p.Source + ": " + line + ". " + err) -} - -func (p *Parse) next() { - p.lastT = p.t - p.t = p.lex.NextToken() -} - -func (p *Parse) expect(t TK) { - p.next() - if p.t.T != t { - p.parseErr("expect " + TokenMap[t]) - } -} - -func (p *Parse) makeUnsigned(utype *VarType) { - switch utype.Type { - case tkTInt, tkTShort, tkTByte: - utype.Unsigned = true - default: - p.parseErr("type " + TokenMap[utype.Type] + " unsigned decoration is not supported") - } -} - -func (p *Parse) parseType() *VarType { - vtype := &VarType{Type: p.t.T} - - switch vtype.Type { - case tkName: - vtype.TypeSt = p.t.S.S - case tkTInt, tkTBool, tkTShort, tkTLong, tkTByte, tkTFloat, tkTDouble, tkTString: - // no nothing - case tkTVector: - p.expect(tkShl) - p.next() - vtype.TypeK = p.parseType() - p.expect(tkShr) - case tkTMap: - p.expect(tkShl) - p.next() - vtype.TypeK = p.parseType() - p.expect(tkComma) - p.next() - vtype.TypeV = p.parseType() - p.expect(tkShr) - case tkUnsigned: - p.next() - utype := p.parseType() - p.makeUnsigned(utype) - return utype - default: - p.parseErr("expert type") - } - return vtype -} - -func (p *Parse) parseEnum() { - enum := EnumInfo{} - p.expect(tkName) - enum.Name = p.t.S.S - for _, v := range p.Enum { - if v.Name == enum.Name { - p.parseErr(enum.Name + " Redefine.") - } - } - p.expect(tkBraceLeft) - -LFOR: - for { - p.next() - switch p.t.T { - case tkBraceRight: - break LFOR - case tkName: - k := p.t.S.S - p.next() - switch p.t.T { - case tkComma: - m := EnumMember{Key: k, Type: 2} - enum.Mb = append(enum.Mb, m) - case tkBraceRight: - m := EnumMember{Key: k, Type: 2} - enum.Mb = append(enum.Mb, m) - break LFOR - case tkEq: - p.next() - switch p.t.T { - case tkInteger: - m := EnumMember{Key: k, Value: int32(p.t.S.I)} - enum.Mb = append(enum.Mb, m) - case tkName: - m := EnumMember{Key: k, Type: 1, Name: p.t.S.S} - enum.Mb = append(enum.Mb, m) - default: - p.parseErr("not expect " + TokenMap[p.t.T]) - } - p.next() - if p.t.T == tkBraceRight { - break LFOR - } else if p.t.T == tkComma { - } else { - p.parseErr("expect , or }") - } - } - } - } - p.expect(tkSemi) - p.Enum = append(p.Enum, enum) -} - -func (p *Parse) parseStructMemberDefault(m *StructMember) { - m.DefType = p.t.T - switch p.t.T { - case tkInteger: - if !isNumberType(m.Type.Type) && m.Type.Type != tkName { - // enum auto defined type ,default value is number. 
- p.parseErr("type does not accept number") - } - m.Default = p.t.S.S - case tkFloat: - if !isNumberType(m.Type.Type) { - p.parseErr("type does not accept number") - } - m.Default = p.t.S.S - case tkString: - if isNumberType(m.Type.Type) { - p.parseErr("type does not accept string") - } - m.Default = `"` + p.t.S.S + `"` - case tkTrue: - if m.Type.Type != tkTBool { - p.parseErr("default value format error") - } - m.Default = "true" - case tkFalse: - if m.Type.Type != tkTBool { - p.parseErr("default value format error") - } - m.Default = "false" - case tkName: - m.Default = p.t.S.S - default: - p.parseErr("default value format error") - } -} - -func (p *Parse) parseStructMember() *StructMember { - // tag or end - p.next() - if p.t.T == tkBraceRight { - return nil - } - if p.t.T != tkInteger { - p.parseErr("expect tags.") - } - m := &StructMember{} - m.Tag = int32(p.t.S.I) - - // require or optional - p.next() - if p.t.T == tkRequire { - m.Require = true - } else if p.t.T == tkOptional { - m.Require = false - } else { - p.parseErr("expect require or optional") - } - - // type - p.next() - if !isType(p.t.T) && p.t.T != tkName && p.t.T != tkUnsigned { - p.parseErr("expect type") - } else { - m.Type = p.parseType() - } - - // key - p.expect(tkName) - m.Key = p.t.S.S - - p.next() - if p.t.T == tkSemi { - return m - } - if p.t.T == tkSquareLeft { - p.expect(tkInteger) - m.Type = &VarType{Type: tkTArray, TypeK: m.Type, TypeL: p.t.S.I} - p.expect(tkSquarerRight) - p.expect(tkSemi) - return m - } - if p.t.T != tkEq { - p.parseErr("expect ; or =") - } - if p.t.T == tkTMap || p.t.T == tkTVector || p.t.T == tkName { - p.parseErr("map, vector, custom type cannot set default value") - } - - // default - p.next() - p.parseStructMemberDefault(m) - p.expect(tkSemi) - - return m -} - -func (p *Parse) checkTag(st *StructInfo) { - set := make(map[int32]bool) - for _, v := range st.Mb { - if set[v.Tag] { - p.parseErr("tag = " + strconv.Itoa(int(v.Tag)) + ". have duplicates") - } - set[v.Tag] = true - } -} - -func (p *Parse) sortTag(st *StructInfo) { - sort.Sort(StructMemberSorter(st.Mb)) -} - -func (p *Parse) parseStruct() { - st := StructInfo{} - p.expect(tkName) - st.Name = p.t.S.S - for _, v := range p.Struct { - if v.Name == st.Name { - p.parseErr(st.Name + " Redefine.") - } - } - p.expect(tkBraceLeft) - - for { - m := p.parseStructMember() - if m == nil { - break - } - st.Mb = append(st.Mb, *m) - } - p.expect(tkSemi) //semicolon at the end of the struct. - - p.checkTag(&st) - p.sortTag(&st) - - p.Struct = append(p.Struct, st) -} - -func (p *Parse) parseInterfaceFun() *FunInfo { - fun := &FunInfo{} - p.next() - if p.t.T == tkBraceRight { - return nil - } - if p.t.T == tkVoid { - fun.HasRet = false - } else if !isType(p.t.T) && p.t.T != tkName && p.t.T != tkUnsigned { - p.parseErr("expect type") - } else { - fun.HasRet = true - fun.RetType = p.parseType() - } - p.expect(tkName) - fun.Name = p.t.S.S - p.expect(tkPtl) - - p.next() - if p.t.T == tkShr { - return fun - } - - // No parameter function, exit directly. 
- if p.t.T == tkPtr { - p.expect(tkSemi) - return fun - } - - for { - arg := &ArgInfo{} - if p.t.T == tkOut { - arg.IsOut = true - p.next() - } else { - arg.IsOut = false - } - - arg.Type = p.parseType() - p.next() - if p.t.T == tkName { - arg.Name = p.t.S.S - p.next() - } - - fun.Args = append(fun.Args, *arg) - - if p.t.T == tkComma { - p.next() - } else if p.t.T == tkPtr { - p.expect(tkSemi) - break - } else { - p.parseErr("expect , or )") - } - } - return fun -} - -func (p *Parse) parseInterface() { - itf := &InterfaceInfo{} - p.expect(tkName) - itf.Name = p.t.S.S - for _, v := range p.Interface { - if v.Name == itf.Name { - p.parseErr(itf.Name + " Redefine.") - } - } - p.expect(tkBraceLeft) - - for { - fun := p.parseInterfaceFun() - if fun == nil { - break - } - itf.Fun = append(itf.Fun, *fun) - } - p.expect(tkSemi) //semicolon at the end of struct. - p.Interface = append(p.Interface, *itf) -} - -func (p *Parse) parseConst() { - m := ConstInfo{} - - // type - p.next() - switch p.t.T { - case tkTVector, tkTMap: - p.parseErr("const no supports type vector or map.") - case tkTBool, tkTByte, tkTShort, - tkTInt, tkTLong, tkTFloat, - tkTDouble, tkTString, tkUnsigned: - m.Type = p.parseType() - default: - p.parseErr("expect type.") - } - - p.expect(tkName) - m.Name = p.t.S.S - - p.expect(tkEq) - - // default - p.next() - switch p.t.T { - case tkInteger, tkFloat: - if !isNumberType(m.Type.Type) { - p.parseErr("type does not accept number") - } - m.Value = p.t.S.S - case tkString: - if isNumberType(m.Type.Type) { - p.parseErr("type does not accept string") - } - m.Value = `"` + p.t.S.S + `"` - case tkTrue: - if m.Type.Type != tkTBool { - p.parseErr("default value format error") - } - m.Value = "true" - case tkFalse: - if m.Type.Type != tkTBool { - p.parseErr("default value format error") - } - m.Value = "false" - default: - p.parseErr("default value format error") - } - p.expect(tkSemi) - - p.Const = append(p.Const, m) -} - -func (p *Parse) parseHashKey() { - hashKey := HashKeyInfo{} - p.expect(tkSquareLeft) - p.expect(tkName) - hashKey.Name = p.t.S.S - p.expect(tkComma) - for { - p.expect(tkName) - hashKey.Member = append(hashKey.Member, p.t.S.S) - p.next() - t := p.t - switch t.T { - case tkSquarerRight: - p.expect(tkSemi) - p.HashKey = append(p.HashKey, hashKey) - return - case tkComma: - default: - p.parseErr("expect ] or ,") - } - } -} - -func (p *Parse) parseModuleSegment() { - p.expect(tkBraceLeft) - - for { - p.next() - t := p.t - switch t.T { - case tkBraceRight: - p.expect(tkSemi) - return - case tkConst: - p.parseConst() - case tkEnum: - p.parseEnum() - case tkStruct: - p.parseStruct() - case tkInterface: - p.parseInterface() - case tkKey: - p.parseHashKey() - default: - p.parseErr("not except " + TokenMap[t.T]) - } - } -} - -func (p *Parse) parseModule() { - p.expect(tkName) - - if p.Module != "" { - // 解决一个tars文件中定义多个module - name := p.ProtoName + "_" + p.t.S.S + ".tars" - newp := newParse(name, nil, nil) - newp.IncChain = p.IncChain - newp.lex = p.lex - newp.Include = p.Include - newp.IncParse = p.IncParse - cowp := *p - newp.IncParse = append(newp.IncParse, &cowp) - newp.Module = p.t.S.S - newp.parseModuleSegment() - newp.analyzeDepend() - if p.fileNames[name] { - // merge - for _, incParse := range p.IncParse { - if incParse.ProtoName == newp.ProtoName { - incParse.Struct = append(incParse.Struct, newp.Struct...) - incParse.Interface = append(incParse.Interface, newp.Interface...) - incParse.Enum = append(incParse.Enum, newp.Enum...) 
- incParse.Const = append(incParse.Const, newp.Const...) - incParse.HashKey = append(incParse.HashKey, newp.HashKey...) - break - } - } - } else { - // 增加已经解析的module - p.IncParse = append(p.IncParse, newp) - p.fileNames[name] = true - } - p.lex = newp.lex - } else { - p.Module = p.t.S.S - p.parseModuleSegment() - } -} - -func (p *Parse) parseInclude() { - p.expect(tkString) - p.Include = append(p.Include, p.t.S.S) -} - -// Looking for the true type of user-defined identifier -func (p *Parse) findTNameType(tname string) (TK, string, string) { - for _, v := range p.Struct { - if p.Module+"::"+v.Name == tname { - return tkStruct, p.Module, p.ProtoName - } - } - - for _, v := range p.Enum { - if p.Module+"::"+v.Name == tname { - return tkEnum, p.Module, p.ProtoName - } - } - - for _, pInc := range p.IncParse { - ret, mod, protoName := pInc.findTNameType(tname) - if ret != tkName { - return ret, mod, protoName - } - } - // not find - return tkName, p.Module, p.ProtoName -} - -func (p *Parse) findEnumName(ename string) (*EnumMember, *EnumInfo) { - if strings.Contains(ename, "::") { - vec := strings.Split(ename, "::") - if len(vec) >= 2 { - ename = vec[1] - } - } - var cmb *EnumMember - var cenum *EnumInfo - for ek, enum := range p.Enum { - for mk, mb := range enum.Mb { - if mb.Key != ename { - continue - } - if cmb == nil { - cmb = &enum.Mb[mk] - cenum = &p.Enum[ek] - } else { - p.parseErr(ename + " name conflict [" + cenum.Name + "::" + cmb.Key + " or " + enum.Name + "::" + mb.Key) - return nil, nil - } - } - } - for _, pInc := range p.IncParse { - if cmb == nil { - cmb, cenum = pInc.findEnumName(ename) - } else { - break - } - } - if cenum != nil && cenum.Module == "" { - if *gModuleCycle == true { - cenum.Module = p.ProtoName + "_" + p.Module - } else { - cenum.Module = p.Module - } - } - return cmb, cenum -} - -func addToSet(m *map[string]bool, module string) { - if *m == nil { - *m = make(map[string]bool) - } - (*m)[module] = true -} - -func addToMap(m *map[string]string, module string, value string) { - if *m == nil { - *m = make(map[string]string) - } - (*m)[module] = value -} - -func (p *Parse) checkDepTName(ty *VarType, dm *map[string]bool, dmj *map[string]string) { - if ty.Type == tkName { - name := ty.TypeSt - if strings.Count(name, "::") == 0 { - name = p.Module + "::" + name - } - - mod := "" - protoName := "" - ty.CType, mod, protoName = p.findTNameType(name) - if ty.CType == tkName { - p.parseErr(ty.TypeSt + " not find define") - } - if *gModuleCycle == true { - if mod != p.Module || protoName != p.ProtoName { - var modStr string - if *gModuleUpper { - modStr = upperFirstLetter(mod) - } else { - modStr = mod - } - addToMap(dmj, protoName+"_"+modStr, protoName) - - if strings.Contains(ty.TypeSt, mod+"::") { - ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", protoName+"_"+modStr+"::", 1) - } else { - ty.TypeSt = protoName + "_" + modStr + "::" + ty.TypeSt - } - } else { - // the same Module ,do not add self. - ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", "", 1) - } - } else { - if mod != p.Module { - addToSet(dm, mod) - } else { - // the same Module ,do not add self. 
- ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", "", 1) - } - } - } else if ty.Type == tkTVector { - p.checkDepTName(ty.TypeK, dm, dmj) - } else if ty.Type == tkTMap { - p.checkDepTName(ty.TypeK, dm, dmj) - p.checkDepTName(ty.TypeV, dm, dmj) - } -} - -// analysis custom type,whether have definition -func (p *Parse) analyzeTName() { - for i, v := range p.Struct { - for _, v := range v.Mb { - ty := v.Type - p.checkDepTName(ty, &p.Struct[i].DependModule, &p.Struct[i].DependModuleWithJce) - } - } - - for i, v := range p.Interface { - for _, v := range v.Fun { - for _, v := range v.Args { - ty := v.Type - p.checkDepTName(ty, &p.Interface[i].DependModule, &p.Interface[i].DependModuleWithJce) - } - if v.RetType != nil { - p.checkDepTName(v.RetType, &p.Interface[i].DependModule, &p.Interface[i].DependModuleWithJce) - } - } - } -} - -func (p *Parse) analyzeDefault() { - for _, v := range p.Struct { - for i, r := range v.Mb { - if r.Default != "" && r.DefType == tkName { - mb, enum := p.findEnumName(r.Default) - if mb == nil || enum == nil { - p.parseErr("can not find default value" + r.Default) - } - defValue := enum.Name + "_" + upperFirstLetter(mb.Key) - var currModule string - if *gModuleCycle == true { - currModule = p.ProtoName + "_" + p.Module - } else { - currModule = p.Module - } - if len(enum.Module) > 0 && currModule != enum.Module { - defValue = enum.Module + "." + defValue - } - v.Mb[i].Default = defValue - } - } - } -} - -// TODO analysis key[],have quoted the correct struct and member name. -func (p *Parse) analyzeHashKey() { - -} - -func (p *Parse) analyzeDepend() { - for _, v := range p.Include { - relativePath := path.Dir(p.Source) - dependFile := relativePath + "/" + v - pInc := ParseFile(dependFile, p.IncChain) - p.IncParse = append(p.IncParse, pInc) - fmt.Println("parse include: ", v) - } - - p.analyzeDefault() - p.analyzeTName() - p.analyzeHashKey() -} - -func (p *Parse) parse() { -OUT: - for { - p.next() - t := p.t - switch t.T { - case tkEos: - break OUT - case tkInclude: - p.parseInclude() - case tkModule: - p.parseModule() - default: - p.parseErr("Expect include or module.") - } - } - p.analyzeDepend() -} - -func newParse(s string, b []byte, incChain []string) *Parse { - p := &Parse{Source: s, ProtoName: path2ProtoName(s)} - for _, v := range incChain { - if s == v { - panic("jce circular reference: " + s) - } - } - incChain = append(incChain, s) - p.IncChain = incChain - fmt.Println(s, p.IncChain) - - p.lex = NewLexState(s, b) - p.fileNames = map[string]bool{} - return p -} - -// ParseFile parse a file,return grammar tree. -func ParseFile(filePath string, incChain []string) *Parse { - if _, err := os.Stat(filePath); os.IsNotExist(err) { - // 查找tars文件路径 - filename := path.Base(filePath) - for _, include := range includes { - include = strings.TrimRight(include, "/") - filePath = include + "/" + filename - if _, err = os.Stat(filePath); err == nil { - break - } - } - } - b, err := ioutil.ReadFile(filePath) - if err != nil { - fmt.Println("file read error: " + filePath + ". 
" + err.Error()) - } - - p := newParse(filePath, b, incChain) - p.parse() - - return p -} diff --git a/tars/tools/tars2go/parse/parse.go b/tars/tools/tars2go/parse/parse.go new file mode 100644 index 00000000..e852800a --- /dev/null +++ b/tars/tools/tars2go/parse/parse.go @@ -0,0 +1,704 @@ +package parse + +import ( + "log" + "os" + "path" + "sort" + "strconv" + "strings" + + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/ast" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/lexer" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/options" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/token" + "github.com/TarsCloud/TarsGo/tars/tools/tars2go/utils" +) + +// Parse record information of parse file. +type Parse struct { + opt *options.Options + Module *ast.ModuleInfo + + lex *lexer.LexState + tk *token.Token + lastTk *token.Token + + // jce include chain + IncChain []string + DependModuleWithJce map[string]bool + + fileNames map[string]bool +} + +// NewParse parse a file,return grammar tree. +func NewParse(opt *options.Options, filePath string, incChain []string) *ast.ModuleInfo { + if _, err := os.Stat(filePath); os.IsNotExist(err) { + // 查找tars文件路径 + filename := path.Base(filePath) + for _, include := range opt.Includes { + include = strings.TrimRight(include, "/") + filePath = include + "/" + filename + if _, err = os.Stat(filePath); err == nil { + break + } + } + } + b, err := os.ReadFile(filePath) + if err != nil { + log.Fatalln("file read error: " + filePath + ". " + err.Error()) + } + + p := newParse(opt, filePath, b, incChain) + p.parse() + + return p.Module +} + +func newParse(opt *options.Options, source string, data []byte, incChain []string) *Parse { + for _, v := range incChain { + if source == v { + panic("jce circular reference: " + source) + } + } + incChain = append(incChain, source) + log.Println(source, incChain) + + p := &Parse{ + opt: opt, + Module: &ast.ModuleInfo{ + Source: source, + ProtoName: utils.Path2ProtoName(source), + }, + lex: lexer.NewLexState(source, data), + IncChain: incChain, + fileNames: map[string]bool{}, + } + return p +} + +func (p *Parse) parseErr(err string) { + line := "0" + if p.tk != nil { + line = strconv.Itoa(p.tk.Line) + } + + panic(p.Module.Source + ": " + line + ". 
" + err) +} + +func (p *Parse) next() { + p.lastTk = p.tk + p.tk = p.lex.NextToken() +} + +func (p *Parse) expect(t token.Type) { + p.next() + if p.tk.T != t { + p.parseErr("expect " + token.Value(t)) + } +} + +func (p *Parse) makeUnsigned(utype *ast.VarType) { + switch utype.Type { + case token.TInt, token.TShort, token.TByte: + utype.Unsigned = true + default: + p.parseErr("type " + token.Value(utype.Type) + " unsigned decoration is not supported") + } +} + +func (p *Parse) parseType() *ast.VarType { + vtype := &ast.VarType{Type: p.tk.T} + + switch vtype.Type { + case token.Name: + vtype.TypeSt = p.tk.S.S + case token.TInt, token.TBool, token.TShort, token.TLong, token.TByte, token.TFloat, token.TDouble, token.TString: + // no nothing + case token.TVector: + p.expect(token.Shl) + p.next() + vtype.TypeK = p.parseType() + p.expect(token.Shr) + case token.TMap: + p.expect(token.Shl) + p.next() + vtype.TypeK = p.parseType() + p.expect(token.Comma) + p.next() + vtype.TypeV = p.parseType() + p.expect(token.Shr) + case token.Unsigned: + p.next() + utype := p.parseType() + p.makeUnsigned(utype) + return utype + default: + p.parseErr("expert type") + } + return vtype +} + +func (p *Parse) parseEnum() { + enum := ast.EnumInfo{} + p.expect(token.Name) + enum.Name = p.tk.S.S + for _, v := range p.Module.Enum { + if v.Name == enum.Name { + p.parseErr(enum.Name + " Redefine.") + } + } + p.expect(token.BraceLeft) + +LFOR: + for { + p.next() + switch p.tk.T { + case token.BraceRight: + break LFOR + case token.Name: + k := p.tk.S.S + p.next() + switch p.tk.T { + case token.Comma: + m := ast.EnumMember{Key: k, Type: 2} + enum.Mb = append(enum.Mb, m) + case token.BraceRight: + m := ast.EnumMember{Key: k, Type: 2} + enum.Mb = append(enum.Mb, m) + break LFOR + case token.Eq: + p.next() + switch p.tk.T { + case token.Integer: + m := ast.EnumMember{Key: k, Value: int32(p.tk.S.I)} + enum.Mb = append(enum.Mb, m) + case token.Name: + m := ast.EnumMember{Key: k, Type: 1, Name: p.tk.S.S} + enum.Mb = append(enum.Mb, m) + default: + p.parseErr("not expect " + token.Value(p.tk.T)) + } + p.next() + if p.tk.T == token.BraceRight { + break LFOR + } else if p.tk.T == token.Comma { + } else { + p.parseErr("expect , or }") + } + } + } + } + p.expect(token.Semi) + p.Module.Enum = append(p.Module.Enum, enum) +} + +func (p *Parse) parseStructMemberDefault(m *ast.StructMember) { + m.DefType = p.tk.T + switch p.tk.T { + case token.Integer: + if !token.IsNumberType(m.Type.Type) && m.Type.Type != token.Name { + // enum auto defined type ,default value is number. 
+ p.parseErr("type does not accept number") + } + m.Default = p.tk.S.S + case token.Float: + if !token.IsNumberType(m.Type.Type) { + p.parseErr("type does not accept number") + } + m.Default = p.tk.S.S + case token.String: + if token.IsNumberType(m.Type.Type) { + p.parseErr("type does not accept string") + } + m.Default = `"` + p.tk.S.S + `"` + case token.True: + if m.Type.Type != token.TBool { + p.parseErr("default value format error") + } + m.Default = "true" + case token.False: + if m.Type.Type != token.TBool { + p.parseErr("default value format error") + } + m.Default = "false" + case token.Name: + m.Default = p.tk.S.S + default: + p.parseErr("default value format error") + } +} + +func (p *Parse) parseStructMember() *ast.StructMember { + // tag or end + p.next() + if p.tk.T == token.BraceRight { + return nil + } + if p.tk.T != token.Integer { + p.parseErr("expect tags.") + } + m := &ast.StructMember{} + m.Tag = int32(p.tk.S.I) + + // require or optional + p.next() + if p.tk.T == token.Require { + m.Require = true + } else if p.tk.T == token.Optional { + m.Require = false + } else { + p.parseErr("expect require or optional") + } + + // type + p.next() + if !token.IsType(p.tk.T) && p.tk.T != token.Name && p.tk.T != token.Unsigned { + p.parseErr("expect type") + } else { + m.Type = p.parseType() + } + + // key + p.expect(token.Name) + m.Key = p.tk.S.S + + p.next() + if p.tk.T == token.Semi { + return m + } + if p.tk.T == token.SquareLeft { + p.expect(token.Integer) + m.Type = &ast.VarType{Type: token.TArray, TypeK: m.Type, TypeL: p.tk.S.I} + p.expect(token.SquarerRight) + p.expect(token.Semi) + return m + } + if p.tk.T != token.Eq { + p.parseErr("expect ; or =") + } + if p.tk.T == token.TMap || p.tk.T == token.TVector || p.tk.T == token.Name { + p.parseErr("map, vector, custom type cannot set default value") + } + + // default + p.next() + p.parseStructMemberDefault(m) + p.expect(token.Semi) + + return m +} + +func (p *Parse) checkTag(st *ast.StructInfo) { + set := make(map[int32]bool) + for _, v := range st.Mb { + if set[v.Tag] { + p.parseErr("tag = " + strconv.Itoa(int(v.Tag)) + ". have duplicates") + } + set[v.Tag] = true + } +} + +func (p *Parse) sortTag(st *ast.StructInfo) { + sort.Sort(ast.StructMemberSorter(st.Mb)) +} + +func (p *Parse) parseStruct() { + st := ast.StructInfo{} + p.expect(token.Name) + st.Name = p.tk.S.S + for _, v := range p.Module.Struct { + if v.Name == st.Name { + p.parseErr(st.Name + " Redefine.") + } + } + p.expect(token.BraceLeft) + + for { + m := p.parseStructMember() + if m == nil { + break + } + st.Mb = append(st.Mb, *m) + } + p.expect(token.Semi) //semicolon at the end of the struct. + + p.checkTag(&st) + p.sortTag(&st) + + p.Module.Struct = append(p.Module.Struct, st) +} + +func (p *Parse) parseInterfaceFun() *ast.FunInfo { + fun := &ast.FunInfo{} + p.next() + if p.tk.T == token.BraceRight { + return nil + } + if p.tk.T == token.Void { + fun.HasRet = false + } else if !token.IsType(p.tk.T) && p.tk.T != token.Name && p.tk.T != token.Unsigned { + p.parseErr("expect type") + } else { + fun.HasRet = true + fun.RetType = p.parseType() + } + p.expect(token.Name) + fun.Name = p.tk.S.S + p.expect(token.Ptl) + + p.next() + if p.tk.T == token.Shr { + return fun + } + + // No parameter function, exit directly. 
+ if p.tk.T == token.Ptr { + p.expect(token.Semi) + return fun + } + + for { + arg := &ast.ArgInfo{} + if p.tk.T == token.Out { + arg.IsOut = true + p.next() + } else { + arg.IsOut = false + } + + arg.Type = p.parseType() + p.next() + if p.tk.T == token.Name { + arg.Name = p.tk.S.S + p.next() + } + + fun.Args = append(fun.Args, *arg) + + if p.tk.T == token.Comma { + p.next() + } else if p.tk.T == token.Ptr { + p.expect(token.Semi) + break + } else { + p.parseErr("expect , or )") + } + } + return fun +} + +func (p *Parse) parseInterface() { + itf := &ast.InterfaceInfo{} + p.expect(token.Name) + itf.Name = p.tk.S.S + for _, v := range p.Module.Interface { + if v.Name == itf.Name { + p.parseErr(itf.Name + " Redefine.") + } + } + p.expect(token.BraceLeft) + + for { + fun := p.parseInterfaceFun() + if fun == nil { + break + } + itf.Fun = append(itf.Fun, *fun) + } + p.expect(token.Semi) //semicolon at the end of struct. + p.Module.Interface = append(p.Module.Interface, *itf) +} + +func (p *Parse) parseConst() { + m := ast.ConstInfo{} + + // type + p.next() + switch p.tk.T { + case token.TVector, token.TMap: + p.parseErr("const no supports type vector or map.") + case token.TBool, token.TByte, token.TShort, + token.TInt, token.TLong, token.TFloat, + token.TDouble, token.TString, token.Unsigned: + m.Type = p.parseType() + default: + p.parseErr("expect type.") + } + + p.expect(token.Name) + m.Name = p.tk.S.S + + p.expect(token.Eq) + + // default + p.next() + switch p.tk.T { + case token.Integer, token.Float: + if !token.IsNumberType(m.Type.Type) { + p.parseErr("type does not accept number") + } + m.Value = p.tk.S.S + case token.String: + if token.IsNumberType(m.Type.Type) { + p.parseErr("type does not accept string") + } + m.Value = `"` + p.tk.S.S + `"` + case token.True: + if m.Type.Type != token.TBool { + p.parseErr("default value format error") + } + m.Value = "true" + case token.False: + if m.Type.Type != token.TBool { + p.parseErr("default value format error") + } + m.Value = "false" + default: + p.parseErr("default value format error") + } + p.expect(token.Semi) + + p.Module.Const = append(p.Module.Const, m) +} + +func (p *Parse) parseHashKey() { + hashKey := ast.HashKeyInfo{} + p.expect(token.SquareLeft) + p.expect(token.Name) + hashKey.Name = p.tk.S.S + p.expect(token.Comma) + for { + p.expect(token.Name) + hashKey.Member = append(hashKey.Member, p.tk.S.S) + p.next() + t := p.tk + switch t.T { + case token.SquarerRight: + p.expect(token.Semi) + p.Module.HashKey = append(p.Module.HashKey, hashKey) + return + case token.Comma: + default: + p.parseErr("expect ] or ,") + } + } +} + +func (p *Parse) parseModuleSegment() { + p.expect(token.BraceLeft) + + for { + p.next() + t := p.tk + switch t.T { + case token.BraceRight: + p.expect(token.Semi) + return + case token.Const: + p.parseConst() + case token.Enum: + p.parseEnum() + case token.Struct: + p.parseStruct() + case token.Interface: + p.parseInterface() + case token.Key: + p.parseHashKey() + default: + p.parseErr("not except " + token.Value(t.T)) + } + } +} + +func (p *Parse) parseModule() { + p.expect(token.Name) + + // 解决一个tars文件中定义多个module + if p.Module.Name != "" { + name := p.Module.ProtoName + "_" + p.tk.S.S + ".tars" + newp := newParse(p.opt, p.Module.Source, nil, nil) + newp.Module.Name = p.tk.S.S + newp.Module.Include = p.Module.Include + m := *p.Module + newp.Module.IncModule = append(newp.Module.IncModule, &m) + newp.lex = p.lex + newp.parseModuleSegment() + newp.analyzeDepend() + if p.fileNames[name] { + // merge + for _, module := 
range p.Module.IncModule { + if module.Name == newp.Module.Name { + module.Struct = append(module.Struct, newp.Module.Struct...) + module.Interface = append(module.Interface, newp.Module.Interface...) + module.Enum = append(module.Enum, newp.Module.Enum...) + module.Const = append(module.Const, newp.Module.Const...) + module.HashKey = append(module.HashKey, newp.Module.HashKey...) + break + } + } + } else { + // 增加已经解析的module + p.Module.IncModule = append(p.Module.IncModule, newp.Module) + p.fileNames[name] = true + } + p.lex = newp.lex + } else { + p.Module.Name = p.tk.S.S + p.parseModuleSegment() + } +} + +func (p *Parse) parseInclude() { + p.expect(token.String) + p.Module.Include = append(p.Module.Include, p.tk.S.S) +} + +func addToSet(m *map[string]bool, module string) { + if *m == nil { + *m = make(map[string]bool) + } + (*m)[module] = true +} + +func addToMap(m *map[string]string, module string, value string) { + if *m == nil { + *m = make(map[string]string) + } + (*m)[module] = value +} + +func (p *Parse) checkDepTName(ty *ast.VarType, dm *map[string]bool, dmj *map[string]string) { + if ty.Type == token.Name { + name := ty.TypeSt + if strings.Count(name, "::") == 0 { + name = p.Module.Name + "::" + name + } + + mod := "" + protoName := "" + ty.CType, mod, protoName = p.Module.FindTNameType(name) + if ty.CType == token.Name { + p.parseErr(ty.TypeSt + " not find define") + } + if p.opt.ModuleCycle { + if mod != p.Module.Name || protoName != p.Module.ProtoName { + var modStr string + if p.opt.ModuleUpper { + modStr = utils.UpperFirstLetter(mod) + } else { + modStr = mod + } + addToMap(dmj, protoName+"_"+modStr, protoName) + + if strings.Contains(ty.TypeSt, mod+"::") { + ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", protoName+"_"+modStr+"::", 1) + } else { + ty.TypeSt = protoName + "_" + modStr + "::" + ty.TypeSt + } + } else { + // the same Module ,do not add self. + ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", "", 1) + } + } else { + if mod != p.Module.Name { + addToSet(dm, mod) + } else { + // the same Module ,do not add self. 
+ ty.TypeSt = strings.Replace(ty.TypeSt, mod+"::", "", 1) + } + } + } else if ty.Type == token.TVector { + p.checkDepTName(ty.TypeK, dm, dmj) + } else if ty.Type == token.TMap { + p.checkDepTName(ty.TypeK, dm, dmj) + p.checkDepTName(ty.TypeV, dm, dmj) + } +} + +// analysis custom type,whether have definition +func (p *Parse) analyzeTName() { + for i, v := range p.Module.Struct { + for _, v := range v.Mb { + ty := v.Type + p.checkDepTName(ty, &p.Module.Struct[i].DependModule, &p.Module.Struct[i].DependModuleWithJce) + } + } + + for i, v := range p.Module.Interface { + for _, v := range v.Fun { + for _, v := range v.Args { + ty := v.Type + p.checkDepTName(ty, &p.Module.Interface[i].DependModule, &p.Module.Interface[i].DependModuleWithJce) + } + if v.RetType != nil { + p.checkDepTName(v.RetType, &p.Module.Interface[i].DependModule, &p.Module.Interface[i].DependModuleWithJce) + } + } + } +} + +func (p *Parse) analyzeDefault() { + for _, v := range p.Module.Struct { + for i, r := range v.Mb { + if r.Default != "" && r.DefType == token.Name { + mb, enum, err := p.Module.FindEnumName(r.Default, p.opt.ModuleCycle) + if err != nil { + p.parseErr(err.Error()) + } + if mb == nil || enum == nil { + p.parseErr("can not find default value" + r.Default) + } + defValue := enum.Name + "_" + utils.UpperFirstLetter(mb.Key) + var currModule string + if p.opt.ModuleCycle { + currModule = p.Module.ProtoName + "_" + p.Module.Name + } else { + currModule = p.Module.Name + } + if len(enum.Module) > 0 && currModule != enum.Module { + defValue = enum.Module + "." + defValue + } + v.Mb[i].Default = defValue + } + } + } +} + +// TODO analysis key[],have quoted the correct struct and member name. +func (p *Parse) analyzeHashKey() { + +} + +func (p *Parse) analyzeDepend() { + for _, v := range p.Module.Include { + relativePath := path.Dir(p.Module.Source) + dependFile := relativePath + "/" + v + pInc := NewParse(p.opt, dependFile, p.IncChain) + p.Module.IncModule = append(p.Module.IncModule, pInc) + log.Println("parse include: ", v) + } + + p.analyzeDefault() + p.analyzeTName() + p.analyzeHashKey() +} + +func (p *Parse) parse() { +OUT: + for { + p.next() + t := p.tk + switch t.T { + case token.Eof: + break OUT + case token.Include: + p.parseInclude() + case token.Module: + p.parseModule() + default: + p.parseErr("Expect include or module.") + } + } + p.analyzeDepend() +} diff --git a/tars/tools/tars2go/token/token.go b/tars/tools/tars2go/token/token.go new file mode 100644 index 00000000..a53132df --- /dev/null +++ b/tars/tools/tars2go/token/token.go @@ -0,0 +1,143 @@ +package token + +// Type is a byte type. +type Type byte + +// SemInfo is struct. +type SemInfo struct { + I int64 + F float64 + S string +} + +// Token record token information. +type Token struct { + T Type + S *SemInfo + Line int +} + +const EOF = 0 + +const ( + Eof Type = iota + BraceLeft //({) + BraceRight //} + Semi //; + Eq //= + Shl //< + Shr //> + Comma //, + Ptl //( + Ptr //) + SquareLeft //[ + SquarerRight //] + Include //#include + + // DummyKeywordBegin keyword + DummyKeywordBegin + Module + Enum + Struct + Interface + Require + Optional + Const + Unsigned + Void + Out + Key + True + False + DummyKeywordEnd + + // DummyTypeBegin type + DummyTypeBegin + TInt + TBool + TShort + TByte + TLong + TFloat + TDouble + TString + TVector + TMap + TArray + DummyTypeEnd + + Name // variable name + // String value + String + Integer + Float +) + +// tokenMap record token value. 
+var tokenMap = [...]string{ + Eof: "", + + BraceLeft: "{", + BraceRight: "}", + Semi: ";", + Eq: "=", + Shl: "<", + Shr: ">", + Comma: ",", + Ptl: "(", + Ptr: ")", + SquareLeft: "[", + SquarerRight: "]", + Include: "#include", + + // keyword + Module: "module", + Enum: "enum", + Struct: "struct", + Interface: "interface", + Require: "require", + Optional: "optional", + Const: "const", + Unsigned: "unsigned", + Void: "void", + Out: "out", + Key: "key", + True: "true", + False: "false", + + // type + TInt: "int", + TBool: "bool", + TShort: "short", + TByte: "byte", + TLong: "long", + TFloat: "float", + TDouble: "double", + TString: "string", + TVector: "vector", + TMap: "map", + TArray: "array", + + Name: "", + // value + String: "", + Integer: "", + Float: "", +} + +func Value(typ Type) string { + return tokenMap[typ] +} + +func IsType(typ Type) bool { + return typ > DummyTypeBegin && typ < DummyTypeEnd +} + +func IsNumberType(typ Type) bool { + switch typ { + case TInt, TBool, TShort, TByte, TLong, TFloat, TDouble: + return true + default: + return false + } +} diff --git a/tars/tools/tars2go/utils/utils.go b/tars/tools/tars2go/utils/utils.go new file mode 100644 index 00000000..52f71a7b --- /dev/null +++ b/tars/tools/tars2go/utils/utils.go @@ -0,0 +1,29 @@ +package utils + +import "strings" + +// UpperFirstLetter Initial capitalization +func UpperFirstLetter(s string) string { + if len(s) == 0 { + return "" + } + if len(s) == 1 { + return strings.ToUpper(string(s[0])) + } + return strings.ToUpper(string(s[0])) + s[1:] +} + +func Path2ProtoName(path string) string { + iBegin := strings.LastIndex(path, "/") + if iBegin == -1 || iBegin >= len(path)-1 { + iBegin = 0 + } else { + iBegin++ + } + iEnd := strings.LastIndex(path, ".tars") + if iEnd == -1 { + iEnd = len(path) + } + + return path[iBegin:iEnd] +} diff --git a/tars/tools/tars2go/version.go b/tars/tools/tars2go/version.go deleted file mode 100644 index a7417cf5..00000000 --- a/tars/tools/tars2go/version.go +++ /dev/null @@ -1,4 +0,0 @@ -package main - -// VERSION of the tars2go tools. -const VERSION = "1.2.1" diff --git a/tars/tools/tars2go/version/version.go b/tars/tools/tars2go/version/version.go new file mode 100644 index 00000000..b8a55745 --- /dev/null +++ b/tars/tools/tars2go/version/version.go @@ -0,0 +1,4 @@ +package version + +// VERSION of the tars2go tools. 
+const VERSION = "1.2.2" From 4126b5b665d142ebd1fcc2922d7ca5952500aa8b Mon Sep 17 00:00:00 2001 From: lbbniu Date: Fri, 16 Jun 2023 22:30:23 +0800 Subject: [PATCH 2/2] gen: update base protocol --- tars/protocol/res/adminf/AdminF.tars.go | 2 +- tars/protocol/res/basef/BaseF.go | 2 +- tars/protocol/res/configf/Config.tars.go | 2 +- tars/protocol/res/configf/ConfigF.go | 2 +- tars/protocol/res/endpointf/EndpointF.go | 2 +- tars/protocol/res/logf/Log.tars.go | 2 +- tars/protocol/res/logf/LogF.go | 2 +- tars/protocol/res/nodef/NodeF.go | 2 +- tars/protocol/res/nodef/ServerF.tars.go | 2 +- tars/protocol/res/notifyf/Notify.tars.go | 2 +- tars/protocol/res/notifyf/NotifyF.go | 2 +- tars/protocol/res/propertyf/PropertyF.go | 2 +- tars/protocol/res/propertyf/PropertyF.tars.go | 2 +- tars/protocol/res/queryf/QueryF.tars.go | 2 +- tars/protocol/res/requestf/RequestF.go | 2 +- tars/protocol/res/statf/StatF.go | 2 +- tars/protocol/res/statf/StatF.tars.go | 2 +- 17 files changed, 17 insertions(+), 17 deletions(-) diff --git a/tars/protocol/res/adminf/AdminF.tars.go b/tars/protocol/res/adminf/AdminF.tars.go index a5581238..e6d6a17a 100644 --- a/tars/protocol/res/adminf/AdminF.tars.go +++ b/tars/protocol/res/adminf/AdminF.tars.go @@ -1,5 +1,5 @@ // Package adminf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from AdminF.tars package adminf diff --git a/tars/protocol/res/basef/BaseF.go b/tars/protocol/res/basef/BaseF.go index 07a699b0..c240e678 100644 --- a/tars/protocol/res/basef/BaseF.go +++ b/tars/protocol/res/basef/BaseF.go @@ -1,5 +1,5 @@ // Package basef comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from BaseF.tars package basef diff --git a/tars/protocol/res/configf/Config.tars.go b/tars/protocol/res/configf/Config.tars.go index f58a7e9f..fa38708c 100644 --- a/tars/protocol/res/configf/Config.tars.go +++ b/tars/protocol/res/configf/Config.tars.go @@ -1,5 +1,5 @@ // Package configf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from ConfigF.tars package configf diff --git a/tars/protocol/res/configf/ConfigF.go b/tars/protocol/res/configf/ConfigF.go index 8797757b..b375ca27 100644 --- a/tars/protocol/res/configf/ConfigF.go +++ b/tars/protocol/res/configf/ConfigF.go @@ -1,5 +1,5 @@ // Package configf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from ConfigF.tars package configf diff --git a/tars/protocol/res/endpointf/EndpointF.go b/tars/protocol/res/endpointf/EndpointF.go index 6a6de8b4..a12770a5 100644 --- a/tars/protocol/res/endpointf/EndpointF.go +++ b/tars/protocol/res/endpointf/EndpointF.go @@ -1,5 +1,5 @@ // Package endpointf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from EndpointF.tars package endpointf diff --git a/tars/protocol/res/logf/Log.tars.go b/tars/protocol/res/logf/Log.tars.go index afdf5740..b22a2be1 100644 --- a/tars/protocol/res/logf/Log.tars.go +++ b/tars/protocol/res/logf/Log.tars.go @@ -1,5 +1,5 @@ // Package logf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from LogF.tars package logf diff --git a/tars/protocol/res/logf/LogF.go b/tars/protocol/res/logf/LogF.go index 55bc2987..321ee04f 100644 --- a/tars/protocol/res/logf/LogF.go +++ b/tars/protocol/res/logf/LogF.go @@ -1,5 +1,5 @@ // Package 
logf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from LogF.tars package logf diff --git a/tars/protocol/res/nodef/NodeF.go b/tars/protocol/res/nodef/NodeF.go index 5e1eddeb..197b2b12 100644 --- a/tars/protocol/res/nodef/NodeF.go +++ b/tars/protocol/res/nodef/NodeF.go @@ -1,5 +1,5 @@ // Package nodef comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from NodeF.tars package nodef diff --git a/tars/protocol/res/nodef/ServerF.tars.go b/tars/protocol/res/nodef/ServerF.tars.go index fbbb14d6..98b71d52 100644 --- a/tars/protocol/res/nodef/ServerF.tars.go +++ b/tars/protocol/res/nodef/ServerF.tars.go @@ -1,5 +1,5 @@ // Package nodef comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from NodeF.tars package nodef diff --git a/tars/protocol/res/notifyf/Notify.tars.go b/tars/protocol/res/notifyf/Notify.tars.go index 2a3ef65b..78c48fb0 100644 --- a/tars/protocol/res/notifyf/Notify.tars.go +++ b/tars/protocol/res/notifyf/Notify.tars.go @@ -1,5 +1,5 @@ // Package notifyf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from NotifyF.tars package notifyf diff --git a/tars/protocol/res/notifyf/NotifyF.go b/tars/protocol/res/notifyf/NotifyF.go index 5e64df04..86a58336 100644 --- a/tars/protocol/res/notifyf/NotifyF.go +++ b/tars/protocol/res/notifyf/NotifyF.go @@ -1,5 +1,5 @@ // Package notifyf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from NotifyF.tars package notifyf diff --git a/tars/protocol/res/propertyf/PropertyF.go b/tars/protocol/res/propertyf/PropertyF.go index 4c43ff67..76362f8f 100644 --- a/tars/protocol/res/propertyf/PropertyF.go +++ b/tars/protocol/res/propertyf/PropertyF.go @@ -1,5 +1,5 @@ // Package propertyf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from PropertyF.tars package propertyf diff --git a/tars/protocol/res/propertyf/PropertyF.tars.go b/tars/protocol/res/propertyf/PropertyF.tars.go index 0c758305..b2d7b5eb 100644 --- a/tars/protocol/res/propertyf/PropertyF.tars.go +++ b/tars/protocol/res/propertyf/PropertyF.tars.go @@ -1,5 +1,5 @@ // Package propertyf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from PropertyF.tars package propertyf diff --git a/tars/protocol/res/queryf/QueryF.tars.go b/tars/protocol/res/queryf/QueryF.tars.go index 37bb9721..a2659402 100644 --- a/tars/protocol/res/queryf/QueryF.tars.go +++ b/tars/protocol/res/queryf/QueryF.tars.go @@ -1,5 +1,5 @@ // Package queryf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from QueryF.tars package queryf diff --git a/tars/protocol/res/requestf/RequestF.go b/tars/protocol/res/requestf/RequestF.go index ccce55d0..2a9b0788 100644 --- a/tars/protocol/res/requestf/RequestF.go +++ b/tars/protocol/res/requestf/RequestF.go @@ -1,5 +1,5 @@ // Package requestf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from RequestF.tars package requestf diff --git a/tars/protocol/res/statf/StatF.go b/tars/protocol/res/statf/StatF.go index 00b446d4..470329dc 100644 --- a/tars/protocol/res/statf/StatF.go +++ b/tars/protocol/res/statf/StatF.go @@ -1,5 +1,5 @@ // Package statf 
comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from StatF.tars package statf diff --git a/tars/protocol/res/statf/StatF.tars.go b/tars/protocol/res/statf/StatF.tars.go index 6e53c12d..3e6423c2 100644 --- a/tars/protocol/res/statf/StatF.tars.go +++ b/tars/protocol/res/statf/StatF.tars.go @@ -1,5 +1,5 @@ // Package statf comment -// This file was generated by tars2go 1.2.1 +// This file was generated by tars2go 1.2.2 // Generated from StatF.tars package statf
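
Usage sketch (not part of the patch series above): with the parser split into importable packages, the module AST returned by parse.NewParse can be consumed outside of tars2go itself. The program below is a minimal, hypothetical example based only on the signatures introduced in PATCH 1/2 — options.NewOptions (which registers and parses the tars2go flags), parse.NewParse, and the exported fields of ast.ModuleInfo; the inspector program itself is illustrative and is not included in the change set.

package main

import (
	"flag"
	"fmt"

	"github.com/TarsCloud/TarsGo/tars/tools/tars2go/options"
	"github.com/TarsCloud/TarsGo/tars/tools/tars2go/parse"
)

func main() {
	// NewOptions registers tars2go's flags (-I, -include, -module-cycle, ...) and calls flag.Parse.
	opt := options.NewOptions()
	for _, file := range flag.Args() {
		// NewParse resolves the file against the -include search paths and
		// returns the parsed *ast.ModuleInfo for the .tars module.
		module := parse.NewParse(opt, file, nil)
		fmt.Println("module:", module.Name)
		for _, st := range module.Struct {
			fmt.Println("  struct:", st.Name, "members:", len(st.Mb))
		}
		for _, itf := range module.Interface {
			fmt.Println("  interface:", itf.Name, "functions:", len(itf.Fun))
		}
	}
}

This mirrors the refactored main.go in PATCH 1/2, which now only wires options, parse, and gencode together instead of keeping parser state in package-level flag variables.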