Resources/CodeGeneration/stonegentool.py @ 489:f6b7f113cf27 (branch bgo-commands-codegen)

Ongoing work on code generation

author    bgo-osimis
date      Mon, 18 Feb 2019 07:46:59 +0100
parents   486:8e40355a172b
children  6470248790db

File contents at this revision (excerpts; elided regions are marked [...]):
import json
import re
import sys
from typing import (
    Any,
    Dict,
    Generator,
    Iterable,
    Iterator,
    List,
    Match,
    Optional,
    Tuple,
    Union,
    cast,
)
from io import StringIO
import time

"""
          1         2         3         4         5         6         7
12345678901234567890123456789012345678901234567890123456789012345678901234567890
"""

class GeneratedCode:
    def __init__(self):

        # file-wide preamble (#include directives, comment...)
        self.cppPreamble = StringIO()

        self.cppEnums = StringIO()
        self.cppStructs = StringIO()
        self.cppDispatcher = StringIO()
        self.cppHandler = StringIO()

        # file-wide preamble (module directives, comment...)
        self.tsPreamble = StringIO()

        self.tsEnums = StringIO()
        self.tsStructs = StringIO()
        self.tsDispatcher = StringIO()
        self.tsHandler = StringIO()

    def FlattenToFiles(self, outputDir: str):
        raise NotImplementedError()

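FlattenToFiles is still a stub at this revision. A minimal sketch of what it might eventually do, assuming one generated file per language named after the module (file names, the extra rootName parameter, and the layout are hypothetical, not part of the changeset):

def _FlattenToFilesSketch(code: GeneratedCode, outputDir: str, rootName: str) -> None:
    # Hypothetical: concatenate the per-section streams in declaration order and
    # write one C++ header and one TypeScript file for the module.
    import os
    cpp = (code.cppPreamble.getvalue() + code.cppEnums.getvalue()
           + code.cppStructs.getvalue() + code.cppDispatcher.getvalue()
           + code.cppHandler.getvalue())
    ts = (code.tsPreamble.getvalue() + code.tsEnums.getvalue()
          + code.tsStructs.getvalue() + code.tsDispatcher.getvalue()
          + code.tsHandler.getvalue())
    with open(os.path.join(outputDir, rootName + "_generated.hpp"), "w") as f:
        f.write(cpp)
    with open(os.path.join(outputDir, rootName + "_generated.ts"), "w") as f:
        f.write(ts)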

# TODO ($$$$): check that field names are unique


class JsonHelpers:
    """A set of utilities to perform JSON operations"""

    @staticmethod
[...]

    retVal = retVal.replace("vector", "std::vector")
    retVal = retVal.replace("int32", "int32_t")
    retVal = retVal.replace("float32", "float")
    retVal = retVal.replace("float64", "double")
    return retVal

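For reference, the replacements shown above map canonical type names to their C++ and TypeScript spellings roughly as follows (illustrative only; only the substitutions visible in this hunk and in the comments below are assumed):

#   GetCppTypeNameFromCanonical("vector<int32>")        -> "std::vector<int32_t>"
#   GetCppTypeNameFromCanonical("vector<float64>")      -> "std::vector<double>"
#   GetTypeScriptTypeNameFromCanonical("vector<int32>") -> "Array<number>"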

def GetTypeScriptTypeNameFromCanonical(canonicalTypeName: str) -> str:
    # TS: replace vector with Array and map with Map
    # string remains string
    # replace int32 by number
[...]

        raise Exception("schema lacks the 'root_name' key")
    if not "types" in schema:
        raise Exception("schema lacks the 'types' key")
    for definedType in schema["types"]:
        CheckTypeSchema(definedType)


def EatToken(sentence: str) -> Tuple[str, str]:
    """splits "A,B,C" into "A" and "B,C" where A, B and C are type names
    (including templates) like "int32", "TotoTutu", or
[...]

    matches = templateRegex.match(typeName)
    if matches == None:
        return (False, "", [])
    else:
        m = cast(Match[str], matches)
        assert len(m.groups()) == 2
        # we need to split with the commas that are outside of the
        # defined types. Simply splitting at commas won't work
        listOfDependentTypes = SplitListOfTypes(m.group(2))
        return (True, m.group(1), listOfDependentTypes)


# def GetPrimitiveType(typeName : str) -> Type:
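Given the return statements above, ParseTemplateType should behave roughly as follows (illustrative expectations only; they assume SplitListOfTypes splits at top-level commas, as the comment says, and that the regex only matches templated names):

#   ParseTemplateType("int32")                     -> (False, "", [])
#   ParseTemplateType("vector<int32>")             -> (True, "vector", ["int32"])
#   ParseTemplateType("map<string,vector<int32>>") -> (True, "map", ["string", "vector<int32>"])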
[...]

    ancestors: List[str],
    genOrderQueue: List[str],
    structTypes: Dict[str, Dict],
    typeName: str,
) -> None:
    if typeName in ancestors:
        raise Exception(
            f"Cyclic dependency chain found: the last of {ancestors} "
            + f"depends on {typeName} that is already in the list."
        )

    if not (typeName in genOrderQueue):
        # if we reach this point, it means the type is NOT a struct or an enum.
        # it is another (non directly user-defined) type that we must parse and
        # create. Let's do it!
        (isTemplate, _, dependentTypeNames) = ParseTemplateType(typeName)
        if isTemplate:
            for dependentTypeName in dependentTypeNames:
                # childAncestors = ancestors.copy() NO TEMPLATE ANCESTOR!!!
                # childAncestors.append(typeName)
                ProcessTypeTree(
                    ancestors, genOrderQueue, structTypes, dependentTypeName
                )
        else:
            if typeName in structTypes:
                ProcessStructType_DepthFirstRecursive(
                    genOrderQueue, structTypes, structTypes[typeName]
                )

def ProcessStructType_DepthFirstRecursive(
    genOrderQueue: List[str], structTypes: Dict[str, Dict], typeDict: Dict
) -> None:
    # let's generate the code according to the dependency order
    typeName: str = typeDict["name"]
    if typeDict["kind"] != "struct":
        raise Exception(
            f"Unexpected kind '{typeDict['kind']}' for type '{typeName}'"
        )
    typeFields: List[Dict] = typeDict["fields"]
    for typeField in typeFields:
        ancestors = [typeName]
        ProcessTypeTree(ancestors, genOrderQueue, structTypes, typeField["type"])
    # now we're pretty sure our dependencies have been processed,
    # we can start marking our code for generation (it might already have
    # been done if someone referenced us earlier)
    if not typeName in genOrderQueue:
        genOrderQueue.append(typeName)
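To make the ordering concrete, here is an illustrative walk-through using the two functions above (not part of the changeset; the type names are invented and it relies on ParseTemplateType handling vector<...> as shown earlier):

structTypes = {
    "Inner": {"name": "Inner", "kind": "struct",
              "fields": [{"name": "x", "type": "int32"}]},
    "Outer": {"name": "Outer", "kind": "struct",
              "fields": [{"name": "items", "type": "vector<Inner>"}]},
}
genOrderQueue: List[str] = []
ProcessStructType_DepthFirstRecursive(genOrderQueue, structTypes, structTypes["Outer"])
# "Outer" depends on "Inner" through vector<Inner>, so the depth-first walk
# should leave genOrderQueue == ["Inner", "Outer"]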

def ProcessEnumerationType(outputStreams: GeneratedCode, typeDict: Dict) -> None:
    tsText: StringIO = StringIO()
    cppText: StringIO = StringIO()

    tsText.write("enum %s\n" % typeDict["name"])
    tsText.write("{\n")

    cppText.write("enum %s\n" % typeDict["name"])
    cppText.write("{\n")

    for i in range(len(typeDict["fields"])):
        field = typeDict["fields"][i]
        name = field["name"]

        tsText.write("  %s" % name)
        if i < len(typeDict["fields"]) - 1:
            tsText.write(",")
        tsText.write("\n")

        cppText.write("  %s" % name)
        if i < len(typeDict["fields"]) - 1:
            cppText.write(",")
        cppText.write("\n")

    tsText.write("};\n\n")
    cppText.write("};\n\n")

    outputStreams.tsEnums.write(tsText.getvalue())
    outputStreams.cppEnums.write(cppText.getvalue())

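For the Color enum from the example schema earlier, both enum streams would end up holding text along these lines (the exact indentation inside the generated text is lost in this view, so it is approximate):

# outputStreams.cppEnums (and tsEnums) would contain roughly:
#
#   enum Color
#   {
#     Red,
#     Green
#   };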

def GetSerializationCode(typeName: str, valueName: str, tempName: str):
    # work-in-progress placeholder: the strings below sketch the C++ JSON
    # serialization code that this function is meant to emit
    if IsPrimitiveType(typeName):
        """
        json::Value val(objectTypeInt...)
        val.setValue(valueName) <--- val
        """
    elif IsArray(typeName):
        """
        {
          json::Value val(objectTypeArray...)
          for(size_t i = 0; i < {fieldName}.size(); ++i)
          {
            json::Value val(objectTypeArray...)
          }
          val.setValue(valueName)
          // <--- the calling code will insert collection/field writing here,
          // like "parent.set("{fieldName}",val) or parent.append(val)
          $collectValue
        }
        """

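One plausible way to turn the sketch above into emitted text is plain string templating. The helper below is purely illustrative: the names, the emitted text, and the choice of string.Template are assumptions, not part of the changeset.

from string import Template

_primitiveWriteTemplate = Template(
    "json::Value ${temp}(objectTypeInt...);\n"
    "${temp}.setValue(${value});\n"
)

def _buildPrimitiveSerialization(valueName: str, tempName: str) -> str:
    # substitute the value/temp names into the C++ text sketched above
    return _primitiveWriteTemplate.substitute(temp=tempName, value=valueName)

# _buildPrimitiveSerialization("message.memberCount", "val") would yield:
#   json::Value val(objectTypeInt...);
#   val.setValue(message.memberCount);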

def ProcessStructType(outputStreams: GeneratedCode, typeDict) -> None:
    tsText: StringIO = StringIO()
    cppText: StringIO = StringIO()

    tsText.write("class %s\n" % typeDict["name"])
    tsText.write("{\n")

    cppText.write("struct %s\n" % typeDict["name"])
    cppText.write("{\n")

    """
    GenerateSerializationCode(typeName, valueName)

    primitives:
    -----------
    int
      jsonValue val(objectInt);
      val.setValue("$name")
      parent.add(("$name",$name)
    double
      ...
    string
      ...

    collections:
    ------------
    dict { }

    serializeValue()
    """

    for i in range(len(typeDict["fields"])):
        field = typeDict["fields"][i]
        name = field["name"]
        tsType = GetTypeScriptTypeNameFromCanonical(field["type"])
        tsText.write("  public %s %s;\n" % (tsType, name))
        cppType = GetCppTypeNameFromCanonical(field["type"])
        cppText.write("  %s %s;\n" % (cppType, name))

    tsText.write("};\n\n")
    cppText.write("};\n\n")

    outputStreams.tsStructs.write(tsText.getvalue())
    outputStreams.cppStructs.write(cppText.getvalue())

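Again for illustration, the Message struct from the example schema earlier would produce output along these lines in the two streams (this simply reflects the write calls above, with the type mappings assumed earlier):

# outputStreams.cppStructs would contain roughly:
#
#   struct Message
#   {
#     int32_t id;
#     std::vector<int32_t> values;
#   };
#
# and outputStreams.tsStructs roughly:
#
#   class Message
#   {
#     public number id;
#     public Array<number> values;
#   };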

def WritePreambles(rootName: str, outputStreams: GeneratedCode) -> None:
    outputStreams.cppPreamble.write(
        """// autogenerated by stonegentool on %s for module %s
#include <cstdint>
#include <string>
#include <vector>
#include <map>

""" % (time.ctime(), rootName))

    outputStreams.tsPreamble.write(
        """// autogenerated by stonegentool on %s for module %s
""" % (time.ctime(), rootName))

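With the example root_name used above, the C++ preamble would start out roughly like this (the timestamp obviously varies):

# // autogenerated by stonegentool on Mon Feb 18 07:46:59 2019 for module TestModule
# #include <cstdint>
# #include <string>
# #include <vector>
# #include <map>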

def ProcessSchema(schema: dict) -> Tuple[str, GeneratedCode, List[str]]:
    CheckSchemaSchema(schema)
    rootName: str = schema["root_name"]
    definedTypes: list = schema["types"]

    # this will be filled with the generation queue. That is, the type
    # names in the order where they must be defined.
    genOrderQueue: List = []

    # the struct names are mapped to their JSON dictionary
    structTypes: Dict[str, Dict] = {}

    outputStreams: GeneratedCode = GeneratedCode()

    WritePreambles(rootName, outputStreams)

    # the order here is the generation order
    for definedType in definedTypes:
        if definedType["kind"] == "enum":
            ProcessEnumerationType(outputStreams, definedType)

    for definedType in definedTypes:
        if definedType["kind"] == "struct":
            structTypes[definedType["name"]] = definedType

    # the order here is NOT the generation order: the types
    # will be processed according to their dependency graph
    for definedType in definedTypes:
        if definedType["kind"] == "struct":
            ProcessStructType_DepthFirstRecursive(
                genOrderQueue, structTypes, definedType
            )

    for i in range(len(genOrderQueue)):
        typeName = genOrderQueue[i]
        typeDict = structTypes[typeName]
        ProcessStructType(outputStreams, typeDict)

    return (rootName, outputStreams, genOrderQueue)


def WriteStreamsToFiles(rootName: str, outputStreams: Dict[str, StringIO]) -> None:
    pass


if __name__ == "__main__":
    import argparse

    parser = argparse.ArgumentParser(
        usage="""stonegentool.py [-h] [-o OUT_DIR] [-v] input_schemas
EXAMPLE: python command_gen.py -o "generated_files/" """
        + """ "mainSchema.json,App Specific Commands.json" """
    )
    parser.add_argument("input_schema", type=str, help="path to the schema file")
    parser.add_argument(
        "-o",
        "--out_dir",
        type=str,
        default=".",
        help="""path of the directory where the files
                will be generated. Default is current
                working folder""",
    )
    parser.add_argument(
        "-v",
        "--verbosity",
        action="count",
        default=0,
        help="""increase output verbosity (0 == errors
                only, 1 == some verbosity, 2 == nerd
                mode)""",
    )

    args = parser.parse_args()
    inputSchemaFilename = args.input_schema
    outDir = args.out_dir
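The hunk ends right after argument parsing; nothing below is part of the file at this revision. For illustration only, the parsed arguments could be wired to ProcessSchema along these lines (WriteStreamsToFiles and GeneratedCode.FlattenToFiles are still stubs here, so no files would actually be written yet):

    # Hypothetical continuation, not in the changeset:
    with open(inputSchemaFilename, "r") as f:
        schema = json.load(f)

    rootName, outputStreams, genOrderQueue = ProcessSchema(schema)

    # WriteStreamsToFiles(rootName, ...) or outputStreams.FlattenToFiles(outDir)
    # would go here once they are implemented.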