comparison NewTests/Concurrency/test_concurrency.py @ 620:8ba9b20ae95f debug-pg-transactions

debug pg transactions tests
author Alain Mazy <am@osimis.io>
date Mon, 05 Feb 2024 22:32:39 +0100
parents 6ba2ff41ea52
children
comparison of 619:79812e0df162 (parent) with 620:8ba9b20ae95f
100 "Username": "postgres", 100 "Username": "postgres",
101 "Password": "postgres", 101 "Password": "postgres",
102 "IndexConnectionsCount": 10, 102 "IndexConnectionsCount": 10,
103 "MaximumConnectionRetries" : 2000, 103 "MaximumConnectionRetries" : 2000,
104 "ConnectionRetryInterval" : 5, 104 "ConnectionRetryInterval" : 5,
105 "TransactionMode": "ReadCommitted", 105 "TransactionMode": "READ COMMITTED"
106 #"TransactionMode": "Serializable", 106 #"TransactionMode": "Serializable",
107 "EnableVerboseLogs": True 107 # "EnableVerboseLogs": True
108 }, 108 },
109 "AuthenticationEnabled": False, 109 "AuthenticationEnabled": False,
110 "OverwriteInstances": True, 110 "OverwriteInstances": True,
111 "JobsEngineThreadsCount" : { 111 "JobsEngineThreadsCount" : {
112 "ResourceModification": 8 112 "ResourceModification": 8
@@ -147,15 +147,19 @@
 cls.o = OrthancApiClient(cls.o._root_url)
 cls.o.wait_started()
 cls.o.delete_all_content()

 def check_is_empty(self):
+print("checking is empty")
+
 self.assertEqual(0, len(self.o.studies.get_all_ids()))
 self.assertEqual(0, len(self.o.series.get_all_ids()))
 self.assertEqual(0, len(self.o.instances.get_all_ids()))

+print("checking is empty (2)")
 stats = self.o.get_json("/statistics")
+print("checking is empty (3)")
 self.assertEqual(0, stats.get("CountPatients"))
 self.assertEqual(0, stats.get("CountStudies"))
 self.assertEqual(0, stats.get("CountSeries"))
 self.assertEqual(0, stats.get("CountInstances"))
 self.assertEqual(0, int(stats.get("TotalDiskSize")))
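The print() calls added in this hunk bracket each REST request so that, if check_is_empty() hangs, the console shows which call is blocking. For reference, a minimal sketch (not part of the change, assuming the requests package and an Orthanc listening on http://localhost:8042) of what the /statistics endpoint returns; Orthanc reports the size counters as strings, which is why the assertion above casts TotalDiskSize with int():

    # Minimal sketch, not part of the change: inspect /statistics directly.
    # The URL is an assumption; the tests derive it from self.o._root_url.
    import requests

    stats = requests.get("http://localhost:8042/statistics").json()
    # Typical shape for an empty server:
    # {"CountPatients": 0, "CountStudies": 0, "CountSeries": 0,
    #  "CountInstances": 0, "TotalDiskSize": "0", ...}
    print(int(stats["TotalDiskSize"]))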
@@ -176,55 +180,55 @@
 t.start()

 for t in workers:
 t.join()

-def test_concurrent_uploads_same_study(self):
-self.o.delete_all_content()
-self.clear_storage(storage_name=self._storage_name)
+# def test_concurrent_uploads_same_study(self):
+# self.o.delete_all_content()
+# self.clear_storage(storage_name=self._storage_name)

-start_time = time.time()
-workers_count = 20
-repeat_count = 1
+# start_time = time.time()
+# workers_count = 20
+# repeat_count = 1

-# massively reupload the same study multiple times with OverwriteInstances set to true
-# Make sure the studies, series and instances are created only once
-self.execute_workers(
-worker_func=worker_upload_folder,
-worker_args=(self.o._root_url, here / "../../Database/Knee", repeat_count,),
-workers_count=workers_count)
+# # massively reupload the same study multiple times with OverwriteInstances set to true
+# # Make sure the studies, series and instances are created only once
+# self.execute_workers(
+# worker_func=worker_upload_folder,
+# worker_args=(self.o._root_url, here / "../../Database/Knee", repeat_count,),
+# workers_count=workers_count)

-elapsed = time.time() - start_time
-print(f"TIMING test_concurrent_uploads_same_study with {workers_count} workers and {repeat_count}x repeat: {elapsed:.3f} s")
+# elapsed = time.time() - start_time
+# print(f"TIMING test_concurrent_uploads_same_study with {workers_count} workers and {repeat_count}x repeat: {elapsed:.3f} s")

-self.assertTrue(self.o.is_alive())
+# self.assertTrue(self.o.is_alive())

-self.assertEqual(1, len(self.o.studies.get_all_ids()))
-self.assertEqual(2, len(self.o.series.get_all_ids()))
-self.assertEqual(50, len(self.o.instances.get_all_ids()))
+# self.assertEqual(1, len(self.o.studies.get_all_ids()))
+# self.assertEqual(2, len(self.o.series.get_all_ids()))
+# self.assertEqual(50, len(self.o.instances.get_all_ids()))

-stats = self.o.get_json("/statistics")
-self.assertEqual(1, stats.get("CountPatients"))
-self.assertEqual(1, stats.get("CountStudies"))
-self.assertEqual(2, stats.get("CountSeries"))
-self.assertEqual(50, stats.get("CountInstances"))
-self.assertEqual(4118738, int(stats.get("TotalDiskSize")))
+# stats = self.o.get_json("/statistics")
+# self.assertEqual(1, stats.get("CountPatients"))
+# self.assertEqual(1, stats.get("CountStudies"))
+# self.assertEqual(2, stats.get("CountSeries"))
+# self.assertEqual(50, stats.get("CountInstances"))
+# self.assertEqual(4118738, int(stats.get("TotalDiskSize")))

-self.o.instances.delete(orthanc_ids=self.o.instances.get_all_ids())
+# self.o.instances.delete(orthanc_ids=self.o.instances.get_all_ids())

-self.check_is_empty()
+# self.check_is_empty()

 def test_concurrent_anonymize_same_study(self):
 self.o.delete_all_content()
 self.clear_storage(storage_name=self._storage_name)

 self.o.upload_folder(here / "../../Database/Knee")
 study_id = self.o.studies.get_all_ids()[0]

 start_time = time.time()
 workers_count = 4
-repeat_count = 10
+repeat_count = 2

 # massively anonymize the same study. This generates new studies and is a
 # good way to simulate ingestion of new studies
 self.execute_workers(
 worker_func=worker_anonymize_study,
@@ -238,26 +242,34 @@

 self.assertEqual(1 + workers_count * repeat_count, len(self.o.studies.get_all_ids()))
 self.assertEqual(2 * (1 + workers_count * repeat_count), len(self.o.series.get_all_ids()))
 self.assertEqual(50 * (1 + workers_count * repeat_count), len(self.o.instances.get_all_ids()))

+print("get stats")
 stats = self.o.get_json("/statistics")
+print("get stats (2)")
 self.assertEqual(1 + workers_count * repeat_count, stats.get("CountPatients"))
 self.assertEqual(1 + workers_count * repeat_count, stats.get("CountStudies"))
 self.assertEqual(2 * (1 + workers_count * repeat_count), stats.get("CountSeries"))
 self.assertEqual(50 * (1 + workers_count * repeat_count), stats.get("CountInstances"))
+print("get changes")
 changes, last_change, done = self.o.get_changes(since=0, limit=100000)
+print("get changes (2)")
 self.assertTrue(done)

 self.assertEqual(1 + workers_count * repeat_count, count_changes(changes, ChangeType.NEW_PATIENT))
 self.assertEqual(1 + workers_count * repeat_count, count_changes(changes, ChangeType.NEW_STUDY))
 self.assertEqual(2 * (1 + workers_count * repeat_count), count_changes(changes, ChangeType.NEW_SERIES))
 self.assertEqual(50 * (1 + workers_count * repeat_count), count_changes(changes, ChangeType.NEW_INSTANCE))

 start_time = time.time()

-self.o.instances.delete(orthanc_ids=self.o.instances.get_all_ids())
+print("deleting")
+all_instances_ids = self.o.instances.get_all_ids()
+print("deleting (2)")
+self.o.instances.delete(orthanc_ids=all_instances_ids)
+print("deleted")

 elapsed = time.time() - start_time
 print(f"TIMING test_concurrent_anonymize_same_study deletion took: {elapsed:.3f} s")

 self.check_is_empty()
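Both tests fan work out through self.execute_workers(worker_func=..., worker_args=..., workers_count=...), whose start/join loops appear at the top of the third hunk. A hedged sketch of what that helper and an upload worker may look like; the real implementations live elsewhere in test_concurrency.py, so the signatures and the appended worker index below are assumptions for illustration only:

    # Hedged sketch only -- the actual helpers are defined elsewhere in
    # test_concurrency.py; names and signatures here are assumptions.
    import threading
    from orthanc_api_client import OrthancApiClient

    def execute_workers(worker_func, worker_args, workers_count):
        workers = []
        for i in range(workers_count):
            # every worker runs the same function concurrently against Orthanc
            t = threading.Thread(target=worker_func, args=worker_args + (i,))
            workers.append(t)
            t.start()

        for t in workers:
            t.join()

    def worker_upload_folder(orthanc_root_url, folder, repeat_count, worker_id):
        # each worker gets its own client and re-uploads the same folder,
        # stressing concurrent inserts with OverwriteInstances enabled
        o = OrthancApiClient(orthanc_root_url)
        for _ in range(repeat_count):
            o.upload_folder(folder)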