Mercurial > hg > orthanc-tests
comparison: PerfsDb/run.py @ 156:f1a75985caa8
first Db test framework - work in progress
author: am@osimis.io
date: Thu, 16 Aug 2018 17:13:32 +0200
parents: (none)
children: (none)
comparison (equal / deleted / inserted / replaced): 155:e0996602b306 -> 156:f1a75985caa8
import argparse

from ConfigFileBuilder import ConfigFileBuilder
from TestConfig import TestConfig
from DbServer import DbServer
from DbType import DbType
from DbSize import DbSize

# Registry of benchmark configurations.  Each key doubles as a CLI flag name
# (e.g. --mysql-small).  Server-backed DBs get a dedicated DbServer on a fixed
# port; SQLite is file-based and needs no server.
testConfigs = {
    "mysql-small":  TestConfig(label="mysql-small",  dbSize=DbSize.Small, dbServer=DbServer(dbType=DbType.MySQL, port=2000)),
    "mysql-large":  TestConfig(label="mysql-large",  dbSize=DbSize.Large, dbServer=DbServer(dbType=DbType.MySQL, port=2001)),
    "sqlite-small": TestConfig(label="sqlite-small", dbSize=DbSize.Small, dbType=DbType.Sqlite),
    "pg9-small":    TestConfig(label="pg9-small",    dbSize=DbSize.Small, dbServer=DbServer(dbType=DbType.PG9, port=2002)),
}

selectedTestConfigs = []

parser = argparse.ArgumentParser(description="Initializes/Runs/Clears PerfsDb setup.")

# create one boolean CLI flag per known configuration
for testConfigName in testConfigs:
    parser.add_argument("--" + testConfigName, action="store_true")

parser.add_argument("--init", help="initializes DBs", action="store_true")
parser.add_argument("--run", help="runs tests", action="store_true")
parser.add_argument("--clear", help="clear DBs", action="store_true")

parser.add_argument("--orthanc-path", help="path to the folder containing Orthanc executable", default=".")
# fixed copy-paste help text: this option points to the plugins folder, not the executable
parser.add_argument("--plugins-path", help="path to the folder containing the Orthanc plugins", default=".")
parser.add_argument("--repeat", help="number of times to repeat each test to average timings", type=int, default=50)

args = parser.parse_args()

# argparse stores "--mysql-small" as attribute "mysql_small"; collect the
# configurations whose flag was set
for testConfigName in testConfigs:
    if getattr(args, testConfigName.replace("-", "_")):
        selectedTestConfigs.append(testConfigName)

# if no test config specified, take them all (copy into a list so we never
# iterate a live dict view)
if len(selectedTestConfigs) == 0:
    selectedTestConfigs = list(testConfigs.keys())

# if no action specified, it means only run (logical 'or', not bitwise '|')
if not (args.init or args.run or args.clear):
    args.init = False
    args.run = True
    args.clear = False

print("***** Orthanc *******")
print("path :", args.orthanc_path)


# known DB plugin binaries (for reference):
# libOrthancMySQLIndex.so
# libOrthancMySQLStorage.so
# libOrthancPostgreSQLIndex.so
# libOrthancPostgreSQLStorage.so
# libOrthancMSSQLIndex.so

# configName -> list of per-test result objects (each exposing .name and
# .averageTimeInMs) as returned by TestConfig.runTests()
results = {}

for configName in selectedTestConfigs:
    testConfig = testConfigs[configName]
    testConfig.setName(configName)
    testConfig.setRepeatCount(args.repeat)

    print("======= " + configName + " ========")

    if args.clear:
        print("** Clearing Db")
        testConfig.clearDb()

    # both --init and --run need a configured, running Orthanc + DB server
    if args.init or args.run:
        print("** Generating config files")
        testConfig.generateOrthancConfigurationFile(args.plugins_path)

        print("** Launching DbServer")
        testConfig.launchDbServer()

        print("** Launching Orthanc")
        testConfig.launchOrthanc(args.orthanc_path)

    if args.init:
        testConfig.initializeDb()

    if args.run:
        print("** Running tests")
        results[configName] = testConfig.runTests()
        print("** Stopping Orthanc")
        testConfig.stopOrthanc()

# ---- results summary: one row per test name, one column per configuration ----
print("++++++++++++++ results summary +++++++++++++++")
testNames = set()
resultsByTestName = {}  # testName -> {configName -> result}
for configName, configResult in results.items():
    for result in configResult:
        testNames.add(result.name)
        if result.name not in resultsByTestName:
            resultsByTestName[result.name] = {}
        resultsByTestName[result.name][configName] = result

headerLine = "{empty:<40}|".format(empty="")
for configName in selectedTestConfigs:
    headerLine += "{configName:^15}|".format(configName=configName)

print(headerLine)

# NOTE(review): assumes every selected config produced a result for every test
# name; a config that skipped a test would raise KeyError here — confirm with
# TestConfig.runTests()
for testName in sorted(testNames):
    resultLine = "{name:<40}|".format(name=testName)
    for configName in selectedTestConfigs:
        resultLine += "{avg:>11.2f} ms |".format(avg=resultsByTestName[testName][configName].averageTimeInMs)
    print(resultLine)

print("** Done")