import json
import re
import sys
from typing import Dict, List, Tuple
469
|
5
|
|
def LoadSchema(file_path : str):
    """Loads and parses the JSON schema file at 'file_path'.

    Returns the parsed object (typically a dict for these schemas).
    Raises OSError if the file cannot be opened and
    json.JSONDecodeError if it is not valid JSON."""
    # explicit encoding: schema files are expected to be UTF-8 JSON
    with open(file_path, 'r', encoding='utf-8') as fp:
        obj = json.load(fp)
    return obj
|
|
10
|
|
class Type:
    """A type defined in (or referenced by) the schema, identified by its
    canonical name, e.g. "int32", "TotoTutu" or "map<string,vector<int32>>"."""

    def __init__(self, canonicalTypeName : str, kind : str):
        """Builds a Type of the given kind.

        The dependent types are the list of canonical types this type
        depends on. For instance, vector<map<string,int32>> depends on
        map<string,int32> that, in turn, depends on string and int32 that,
        in turn, depend on nothing. They are filled in later through
        setDependentTypes().

        Raises ValueError if 'kind' is not one of the allowed kinds."""
        allowedTypeKinds = ["primitive", "enum", "struct", "collection"]
        # BUGFIX: 'assert' is stripped under 'python -O', so validate with a
        # real exception instead; also, 'kind' was checked but never stored.
        if kind not in allowedTypeKinds:
            raise ValueError(
                f"kind {kind} is not allowed. It must be one of {allowedTypeKinds}")
        self.canonicalTypeName = canonicalTypeName
        self.kind = kind
        # BUGFIX: initialize so that getDependentTypes() does not raise
        # AttributeError when called before setDependentTypes().
        self.dependentTypes : List['Type'] = []

    def setDependentTypes(self, dependentTypes : List['Type']) -> None:
        # NOTE: 'Type' must be a quoted forward reference in the annotations
        # of this class: the class object does not exist yet while its body
        # executes, so the original unquoted List[Type] raised NameError.
        self.dependentTypes = dependentTypes

    def getDependentTypes(self) -> List['Type']:
        return self.dependentTypes

    def getCppTypeName(self) -> str:
        """Returns the C++ spelling of the canonical type name:
        map, vector and string are prefixed with std::, int32 becomes
        int32_t, float32 becomes float and float64 becomes double."""
        # BUGFIX: the replacements must be CHAINED; the original restarted
        # from self.canonicalTypeName at every step, so only the last
        # replacement had any effect. Also, 'string' was never replaced
        # although the comment announced it.
        retVal : str = (self.canonicalTypeName
                        .replace("map", "std::map")
                        .replace("vector", "std::vector")
                        .replace("string", "std::string")
                        .replace("int32", "int32_t")
                        .replace("float32", "float")
                        .replace("float64", "double"))
        return retVal

    def getTypeScriptTypeName(self) -> str:
        """Returns the TypeScript spelling of the canonical type name:
        vector becomes Array, map becomes Map, the numeric types all become
        number, bool becomes boolean and string stays string."""
        # BUGFIX: chained replacements (see getCppTypeName).
        retVal : str = (self.canonicalTypeName
                        .replace("map", "Map")
                        .replace("vector", "Array")
                        .replace("int32", "number")
                        .replace("float32", "number")
                        .replace("float64", "number")
                        .replace("bool", "boolean"))
        return retVal
|
|
52
|
|
class Schema:
    """Aggregates the schema root name and the list of Type objects that the
    schema defines."""

    def __init__(self, root_prefix : str, defined_types : List['Type']):
        self.rootName : str = root_prefix
        # BUGFIX: the annotation said 'str' but this holds the list of
        # defined types ('Type' is quoted: it is defined elsewhere in the
        # file and must not be evaluated eagerly here).
        self.definedTypes : List['Type'] = defined_types
|
|
57
|
|
def CheckTypeSchema(definedType : Dict) -> None:
    """Validates one type dictionary from the schema JSON.

    Raises Exception if a required key ('name', 'kind', 'fields') is
    missing, if the kind is not 'enum' or 'struct', if a field lacks a
    name or, for structs, if a field lacks a type."""
    allowedDefinedTypeKinds = ["enum", "struct"]
    # BUGFIX: dict.has_key() was removed in Python 3; use the 'in' operator.
    if 'name' not in definedType:
        raise Exception("type lacks the 'name' key")
    name = definedType['name']
    if 'kind' not in definedType:
        raise Exception(f"type {name} lacks the 'kind' key")
    kind = definedType['kind']
    if kind not in allowedDefinedTypeKinds:
        raise Exception(f"type {name} : kind {kind} is not allowed. It must be one of {allowedDefinedTypeKinds}")

    if 'fields' not in definedType:
        # BUGFIX: the 'f' prefix was missing, so {name} was printed verbatim.
        raise Exception(f"type {name} lacks the 'fields' key")

    # generic check on all kinds of types
    fields = definedType['fields']
    for field in fields:
        # BUGFIX: the original read field['name'] BEFORE checking that the
        # key exists, so a nameless field raised KeyError instead of the
        # intended message (which also lacked its 'f' prefix).
        if 'name' not in field:
            raise Exception(f"field in type {name} lacks the 'name' key")

    # fields in struct must have types
    if kind == 'struct':
        for field in fields:
            fieldName = field['name']
            if 'type' not in field:
                raise Exception(f"field {fieldName} in type {name} lacks the 'type' key")
|
|
85
|
|
def CheckSchemaSchema(schema : Dict) -> None:
    """Validates the top-level schema dictionary: it must contain a
    'root_name' and a 'types' list; each entry of the list is then
    validated with CheckTypeSchema().

    Raises Exception on the first violation found."""
    # BUGFIX: dict.has_key() was removed in Python 3; use the 'in' operator.
    if 'root_name' not in schema:
        raise Exception("schema lacks the 'root_name' key")
    if 'types' not in schema:
        raise Exception("schema lacks the 'types' key")
    for definedType in schema['types']:
        CheckTypeSchema(definedType)
|
|
93
|
|
def CreateAndCacheTypeObject(allTypes : Dict[str, 'Type'], typeDict : Dict) -> None:
    """Creates a Type from its JSON dictionary and stores it in 'allTypes',
    keyed by its canonical name. This does not set the dependentTypes field.

    Raises Exception if the type name is already registered."""
    typeName : str = typeDict['name']
    # BUGFIX: dict.has_key() was removed in Python 3; use the 'in' operator.
    if typeName in allTypes:
        raise Exception(f'Type {typeName} is defined more than once!')
    typeObject = Type(typeName, typeDict['kind'])
    allTypes[typeName] = typeObject
|
468
|
102
|
|
103
|
471
|
104
|
|
def EatToken(sentence : str) -> Tuple[str, str]:
    """splits "A,B,C" into "A" and "B,C" where A, B and C are type names
    (including templates) like "int32", "TotoTutu", or
    "map<map<int32,vector<string>>,map<string,int32>>"

    Raises Exception if the angle brackets in 'sentence' are unbalanced."""
    if sentence.count('<') != sentence.count('>'):
        raise Exception(f"Error in the partial template type list {sentence}. The number of < and > do not match!")

    # the template level we're currently in
    templateLevel = 0
    # BUGFIX: the original iterated 'for i in len(sentence)', which raises
    # TypeError (an int is not iterable); range() was missing. The unused
    # 'token' local was dropped as well.
    for i in range(len(sentence)):
        if (sentence[i] == ",") and (templateLevel == 0):
            # first top-level comma: split here
            return (sentence[0:i], sentence[i + 1:])
        elif sentence[i] == "<":
            templateLevel += 1
        elif sentence[i] == ">":
            templateLevel -= 1
    # no top-level comma: the whole sentence is one token
    return (sentence, "")
|
|
123
|
|
def SplitListOfTypes(typeName : str) -> List[str]:
    """Splits something like
    vector<string>,int32,map<string,map<string,int32>>
    in:
    - vector<string>
    - int32
    - map<string,map<string,int32>>

    This is not possible with a regex, so a small hand-written tokenizer
    (EatToken) is used instead.
    """
    tokenList = []
    restOfString = typeName
    # BUGFIX: the original looped on a 'stillStuffToEat' flag that was never
    # set to False, so the function never returned (and appended empty
    # tokens forever). Loop until the remainder is exhausted instead; an
    # empty input now yields an empty list.
    while restOfString != "":
        firstToken, restOfString = EatToken(restOfString)
        tokenList.append(firstToken)
    return tokenList
|
|
141
|
|
# Matches "TYPENAME<PARAM_LIST>" where TYPENAME is a (possibly empty)
# identifier and PARAM_LIST is a possibly nested template parameter list.
# NOTE: the original pattern repeated the identifier class twice
# ("[...]*[...]*"), which is equivalent to writing it once.
templateRegex = re.compile(r"([a-zA-Z0-9_]*)<([a-zA-Z0-9_,:<>]+)>")

def ParseTemplateType(typeName) -> Tuple[bool, str, List[str]]:
    """ If the type is a template like "SOMETHING<SOME<THING,EL<SE>>>", then
    it returns (True,"SOMETHING",["SOME<THING,EL<SE>>"]),
    otherwise it returns (False,"",[])"""

    # let's remove all whitespace from the type
    # split without argument uses any whitespace string as separator
    # (space, tab, newline, return or formfeed)
    typeName = "".join(typeName.split())
    matches = templateRegex.match(typeName)
    # BUGFIX: compare to None with 'is', not '=='.
    if matches is None:
        # BUGFIX: the original returned "" as third element here but a list
        # in the other branch; return a list in both branches so callers
        # can treat the result uniformly.
        return (False, "", [])
    # we need to split with the commas that are outside of the defined types
    # simply splitting at commas won't work
    listOfDependentTypes = SplitListOfTypes(matches.group(2))
    return (True, matches.group(1), listOfDependentTypes)
|
470
|
160
|
471
|
def GetPrimitiveType(typeName : str) -> 'Type':
    """Returns (and caches in the global 'allTypes' registry) the Type
    object for a primitive type name.

    NOTE(review): 'allTypes' is read from module scope but is never defined
    at module level in this file — confirm it is meant to be a module-level
    Dict[str, Type].

    Raises Exception if typeName is not a known primitive."""
    # BUGFIX: dict.has_key() was removed in Python 3; use the 'in' operator.
    if typeName in allTypes:
        return allTypes[typeName]
    # 'bool' added to the list: both getCppTypeName and getTypeScriptTypeName
    # already translate it, so it is clearly meant to be supported.
    primitiveTypes = ['int32', 'float32', 'float64', 'string', 'bool']
    if typeName not in primitiveTypes:
        raise Exception(f"Type {typeName} is unknown.")
    typeObject = Type(typeName, 'primitive')
    # there are no dependent types in a primitive type --> Type object
    # construction is finished at this point
    allTypes[typeName] = typeObject
    return typeObject
|
470
|
173
|
471
|
def ProcessTypeTree(
        ancestors : List[str]
        , generationQueue : List[str]
        , structTypes : Dict[str, Dict], typeName : str) -> None:
    """Walks the dependency tree of 'typeName' depth-first, queuing the
    struct types it depends on in 'generationQueue' before their users.

    'ancestors' is the chain of struct names currently being expanded and
    is used to detect cyclic dependencies.

    Raises Exception on a cyclic dependency."""
    if typeName in ancestors:
        raise Exception(f"Cyclic dependency chain found: the last of {ancestors} depends on {typeName} that is already in the list.")

    if typeName in generationQueue:
        return  # already processed

    # if we reach this point, it means the type has not been queued yet:
    # it is either a template/collection type to recurse into, or a struct
    # to process depth-first.
    (isTemplate, templateType, parameters) = ParseTemplateType(typeName)
    if isTemplate:
        # BUGFIX: 'parameters' is ALREADY the list of dependent type names;
        # the original re-split it with SplitListOfTypes() (which expects a
        # string) and recursed with an undefined name ('processedTypes')
        # instead of 'generationQueue'.
        for dependentTypeName in parameters:
            # NO TEMPLATE ANCESTOR: templates cannot introduce cycles by
            # themselves, so 'ancestors' is passed through unchanged.
            ProcessTypeTree(ancestors, generationQueue,
                            structTypes, dependentTypeName)
    else:
        # BUGFIX: dict.has_key() was removed in Python 3.
        if typeName in structTypes:
            ProcesStructType_DepthFirstRecursive(generationQueue, structTypes,
                                                 structTypes[typeName])
|
470
|
197
|
471
|
def ProcesStructType_DepthFirstRecursive(
        generationQueue : List[str], structTypes : Dict[str, Dict]
        , typeDict : Dict) -> None:
    """Processes every field type of the struct 'typeDict' depth-first
    (via ProcessTypeTree), then appends the struct name itself to
    'generationQueue', so that dependencies are generated before users.

    Raises Exception if typeDict is not a struct."""
    typeName : str = typeDict['name']
    if typeDict['kind'] != 'struct':
        # BUGFIX: the second half of the message lacked its 'f' prefix, so
        # {typeName} was printed verbatim.
        raise Exception(f"Unexpected kind '{typeDict['kind']}' for " +
                        f"type '{typeName}'")
    typeFields : List[Dict] = typeDict['fields']
    for typeField in typeFields:
        ancestors = [typeName]
        # BUGFIX: recurse on the field's TYPE, not its NAME — the field name
        # is an arbitrary identifier, not a type.
        ProcessTypeTree(ancestors, generationQueue
                        , structTypes, typeField['type'])
    # now we're pretty sure our dependencies have been processed,
    # we can start marking our code for generation
    generationQueue.append(typeName)
|
470
|
214
|
|
def ProcessSchema(schema : dict) -> None:
    """Validates 'schema' (see CheckSchemaSchema) then processes its types:
    enumerations first, then structs in dependency order (depth-first
    through their field types)."""
    CheckSchemaSchema(schema)
    rootName : str = schema['root_name']
    definedTypes : list = schema['types']

    # the ordered generation queue of type names
    # BUGFIX: this was a set(), but ProcesStructType_DepthFirstRecursive()
    # calls .append() on it (AttributeError) and the generation ORDER
    # matters — a list is required.
    generationQueue : List[str] = []

    # the struct names are mapped to their JSON dictionary
    structTypes : Dict[str, Dict] = {}

    # the order here is the generation order
    for definedType in definedTypes:
        if definedType['kind'] == 'enum':
            # NOTE(review): ProcessEnumerationType is not defined anywhere
            # in this file — confirm it is still to be implemented.
            ProcessEnumerationType(generationQueue, definedType)

    # the order here is NOT the generation order: the types
    # will be processed according to their dependency graph
    for definedType in definedTypes:
        if definedType['kind'] == 'struct':
            structTypes[definedType['name']] = definedType
            ProcesStructType_DepthFirstRecursive(generationQueue, structTypes,
                                                 definedType)
|
468
|
237
|
|
if __name__ == '__main__':
    # Command-line entry point: parse the arguments, load the schema JSON
    # and process it.
    import argparse
    parser = argparse.ArgumentParser(usage = """stonegentool.py [-h] [-o OUT_DIR] [-v] input_schemas
EXAMPLE: python command_gen.py -o "generated_files/" "mainSchema.json,App Specific Commands.json" """)
    parser.add_argument("input_schema", type=str,
                        help = "path to the schema file")
    parser.add_argument("-o", "--out_dir", type=str, default=".",
                        help = """path of the directory where the files
                        will be generated. Default is current
                        working folder""")
    parser.add_argument("-v", "--verbosity", action="count", default=0,
                        help = """increase output verbosity (0 == errors
                        only, 1 == some verbosity, 2 == nerd
                        mode""")

    args = parser.parse_args()
    inputSchemaFilename = args.input_schema
    # NOTE(review): outDir and args.verbosity are parsed and echoed but not
    # used by ProcessSchema below — confirm whether generation into
    # 'out_dir' is still to be wired up.
    outDir = args.out_dir

    print("input schema = " + str(inputSchemaFilename))
    print("out dir = " + str(outDir))

    ProcessSchema(LoadSchema(inputSchemaFilename))
|
|
261
|
468
|
262
|
469
|
###################
##     ATTIC     ##
###################

# this works

# NOTE(review): dead experimentation code kept for reference only — the
# 'if False:' guard means it never executes.
if False:
    obj = json.loads("""{
    "firstName": "Alice",
    "lastName": "Hall",
    "age": 35
    }""")
    print(obj)
|