comparison NewTests/Concurrency/test_transfer.py @ 601:3e15e950c462

new transfer tests in the concurrency section
author Alain Mazy <am@osimis.io>
date Fri, 19 Jan 2024 15:03:47 +0100
parents
children d88b0fc15f08
comparison
equal deleted inserted replaced
600:58384ae69f41 601:3e15e950c462
1 import subprocess
2 import time
3 import unittest
4 from orthanc_api_client import OrthancApiClient, ResourceType
5 from orthanc_tools import OrthancTestDbPopulator
6 from helpers import Helpers, wait_container_healthy
7
8 import pathlib
9 import os
# Absolute path of the directory containing this test file; used as the
# working directory for the docker compose commands so the relative
# docker-compose-transfers-concurrency.yml path resolves correctly.
here = pathlib.Path(__file__).parent.resolve()
11
12
13
14
class TestConcurrencyTransfers(unittest.TestCase):
    """Timing/correctness tests for Orthanc peer transfers between two
    dockerized Orthanc instances (oa on :8052, ob on :8053).

    Each test populates one instance with random studies, transfers them to
    the other (with and without compression), verifies that the instance and
    disk-size statistics match on the receiving side, and prints a TIMING
    line for manual benchmark comparison.
    """

    @classmethod
    def cleanup(cls):
        # Remove any containers/volumes left over from a previous run so the
        # test session always starts from an empty Orthanc database.
        os.chdir(here)
        print("Cleaning old compose")
        subprocess.run(["docker", "compose", "-f", "docker-compose-transfers-concurrency.yml", "down", "-v", "--remove-orphans"], check=True)

    @classmethod
    def compose_up(cls):
        # Pull first so image-download time is not counted in the tests' timings.
        print("Pulling containers")  # fixed typo: was "Pullling containers"
        subprocess.run(["docker", "compose", "-f", "docker-compose-transfers-concurrency.yml", "pull"], check=True)

        print("Compose up")
        subprocess.run(["docker", "compose", "-f", "docker-compose-transfers-concurrency.yml", "up", "-d"], check=True)

    @classmethod
    def setUpClass(cls):
        cls.cleanup()
        cls.compose_up()

    @classmethod
    def tearDownClass(cls):
        # Intentionally left commented out so the containers stay up for
        # post-mortem inspection after a test session; re-enable to tear
        # everything down automatically.
        # cls.cleanup()
        pass

    def clean_start(self):
        """Wait for both Orthanc instances to be up and wipe their content.

        Returns:
            (oa, ob): OrthancApiClient for instances 'a' and 'b'.
        """
        oa = OrthancApiClient("http://localhost:8052")
        ob = OrthancApiClient("http://localhost:8053")

        oa.wait_started()
        ob.wait_started()

        oa.delete_all_content()
        ob.delete_all_content()

        return oa, ob

    def test_push(self):
        # Populate 'a', push its studies synchronously to peer 'b', and check
        # that 'b' ends up with identical statistics.
        oa, ob = self.clean_start()

        # Fixed seed so the generated dataset (and therefore the timings) is
        # reproducible across runs.
        populator = OrthancTestDbPopulator(oa, studies_count=5, random_seed=65)
        populator.execute()

        all_studies_ids = oa.studies.get_all_ids()
        instances_count = oa.get_statistics().instances_count
        disk_size = oa.get_statistics().total_disk_size
        repeat_count = 2

        for compression in [True, False]:
            start_time = time.time()

            for _ in range(repeat_count):
                oa.transfers.send(target_peer='b',
                                  resources_ids=all_studies_ids,
                                  resource_type=ResourceType.STUDY,
                                  compress=compression)

                self.assertEqual(instances_count, ob.get_statistics().instances_count)
                self.assertEqual(disk_size, ob.get_statistics().total_disk_size)
                # Empty the receiver so the next repetition transfers everything again.
                ob.delete_all_content()

            elapsed = time.time() - start_time
            print(f"TIMING test_push (compression={compression}) with {instances_count} instances for a total of {disk_size/(1024*1024)} MB (repeat {repeat_count}x): {elapsed:.3f} s")


    def test_pull(self):
        # Populate 'b', trigger an async transfer towards peer 'a', and wait on
        # the job created on 'a' before checking its statistics.
        oa, ob = self.clean_start()

        populator = OrthancTestDbPopulator(ob, studies_count=5, random_seed=65)
        populator.execute()

        all_studies_ids = ob.studies.get_all_ids()
        instances_count = ob.get_statistics().instances_count
        disk_size = ob.get_statistics().total_disk_size
        repeat_count = 2

        for compression in [True, False]:
            start_time = time.time()

            for _ in range(repeat_count):
                # BUG FIX: compress was hardcoded to True, so the loop measured
                # compressed transfers twice and the timing label for the
                # compression=False pass was wrong.
                remote_job = ob.transfers.send_async(target_peer='a',
                                                     resources_ids=all_studies_ids,
                                                     resource_type=ResourceType.STUDY,
                                                     compress=compression)
                # The async call returns a handle to the job running on the
                # remote peer; poll it on 'a' until completion.
                job = oa.jobs.get(orthanc_id=remote_job.remote_job_id)
                job.wait_completed(polling_interval=0.1)

                self.assertEqual(instances_count, oa.get_statistics().instances_count)
                self.assertEqual(disk_size, oa.get_statistics().total_disk_size)
                oa.delete_all_content()

            elapsed = time.time() - start_time
            print(f"TIMING test_pull (compression={compression}) with {instances_count} instances for a total of {disk_size/(1024*1024)} MB (repeat {repeat_count}x): {elapsed:.3f} s")