Mercurial > hg > orthanc-transfers
annotate UnitTests/UnitTestsMain.cpp @ 77:1e396fb509ca default
updated copyright, as Orthanc Team now replaces Osimis
author | Sebastien Jodogne <s.jodogne@gmail.com> |
---|---|
date | Thu, 30 May 2024 22:44:10 +0200 |
parents | 44a0430d7899 |
children |
rev | line source |
---|---|
0 | 1 /** |
2 * Transfers accelerator plugin for Orthanc | |
77
1e396fb509ca
updated copyright, as Orthanc Team now replaces Osimis
Sebastien Jodogne <s.jodogne@gmail.com>
parents:
33
diff
changeset
|
3 * Copyright (C) 2018-2023 Osimis S.A., Belgium |
1e396fb509ca
updated copyright, as Orthanc Team now replaces Osimis
Sebastien Jodogne <s.jodogne@gmail.com>
parents:
33
diff
changeset
|
4 * Copyright (C) 2024-2024 Orthanc Team SRL, Belgium |
1e396fb509ca
updated copyright, as Orthanc Team now replaces Osimis
Sebastien Jodogne <s.jodogne@gmail.com>
parents:
33
diff
changeset
|
5 * Copyright (C) 2021-2024 Sebastien Jodogne, ICTEAM UCLouvain, Belgium |
0 | 6 * |
7 * This program is free software: you can redistribute it and/or | |
8 * modify it under the terms of the GNU Affero General Public License | |
9 * as published by the Free Software Foundation, either version 3 of | |
10 * the License, or (at your option) any later version. | |
11 * | |
12 * This program is distributed in the hope that it will be useful, but | |
13 * WITHOUT ANY WARRANTY; without even the implied warranty of | |
14 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU | |
15 * Affero General Public License for more details. | |
16 * | |
17 * You should have received a copy of the GNU Affero General Public License | |
18 * along with this program. If not, see <http://www.gnu.org/licenses/>. | |
19 **/ | |
20 | |
21 | |
22 #include "../Framework/DownloadArea.h" | |
23 | |
20 | 24 #include <Compression/GzipCompressor.h> |
25 #include <Logging.h> | |
26 #include <OrthancException.h> | |
0 | 27 #include <gtest/gtest.h> |
28 | |
29 | |
30 TEST(Toolbox, Enumerations) | |
31 { | |
32 using namespace OrthancPlugins; | |
33 ASSERT_EQ(BucketCompression_None, StringToBucketCompression(EnumerationToString(BucketCompression_None))); | |
34 ASSERT_EQ(BucketCompression_Gzip, StringToBucketCompression(EnumerationToString(BucketCompression_Gzip))); | |
35 ASSERT_THROW(StringToBucketCompression("None"), Orthanc::OrthancException); | |
36 } | |
37 | |
38 | |
39 TEST(Toolbox, Conversions) | |
40 { | |
41 ASSERT_EQ(2u, OrthancPlugins::ConvertToKilobytes(2048)); | |
42 ASSERT_EQ(1u, OrthancPlugins::ConvertToKilobytes(1000)); | |
43 ASSERT_EQ(0u, OrthancPlugins::ConvertToKilobytes(500)); | |
44 | |
45 ASSERT_EQ(2u, OrthancPlugins::ConvertToMegabytes(2048 * 1024)); | |
46 ASSERT_EQ(1u, OrthancPlugins::ConvertToMegabytes(1000 * 1024)); | |
47 ASSERT_EQ(0u, OrthancPlugins::ConvertToMegabytes(500 * 1024)); | |
48 } | |
49 | |
50 | |
TEST(TransferBucket, Basic)
{
  using namespace OrthancPlugins;

  // Four instances of increasing sizes; the 3rd constructor argument
  // is the MD5 hash, left empty since it is not used by this test
  DicomInstanceInfo d1("d1", 10, "");
  DicomInstanceInfo d2("d2", 20, "");
  DicomInstanceInfo d3("d3", 30, "");
  DicomInstanceInfo d4("d4", 40, "");

  {
    // A freshly-constructed bucket is empty
    TransferBucket b;
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    b.AddChunk(d1, 0, 10);
    b.AddChunk(d2, 0, 20);
    ASSERT_THROW(b.AddChunk(d3, 0, 31), Orthanc::OrthancException);  // Size exceeds d3 (30 bytes)
    ASSERT_THROW(b.AddChunk(d3, 1, 30), Orthanc::OrthancException);  // Rejected: offset+size overruns d3, and non-zero offsets are only valid in the 1st chunk
    b.AddChunk(d3, 0, 30);

    // The bucket now holds the 3 instances in full: 10 + 20 + 30 bytes
    ASSERT_EQ(60u, b.GetTotalSize());
    ASSERT_EQ(3u, b.GetChunksCount());

    ASSERT_EQ("d1", b.GetChunkInstanceId(0));
    ASSERT_EQ(0u, b.GetChunkOffset(0));
    ASSERT_EQ(10u, b.GetChunkSize(0));
    ASSERT_EQ("d2", b.GetChunkInstanceId(1));
    ASSERT_EQ(0u, b.GetChunkOffset(1));
    ASSERT_EQ(20u, b.GetChunkSize(1));
    ASSERT_EQ("d3", b.GetChunkInstanceId(2));
    ASSERT_EQ(0u, b.GetChunkOffset(2));
    ASSERT_EQ(30u, b.GetChunkSize(2));

    // The pull URI encodes the dot-separated instance IDs, the offset
    // within the first instance, the total size, and the compression
    std::string uri;
    b.ComputePullUri(uri, BucketCompression_None);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=0&size=60&compression=none", uri);
    b.ComputePullUri(uri, BucketCompression_Gzip);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=0&size=60&compression=gzip", uri);

    // Clearing resets the bucket to its empty state
    b.Clear();
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    ASSERT_THROW(b.ComputePullUri(uri, BucketCompression_None), Orthanc::OrthancException); // Empty
  }

  {
    TransferBucket b;
    b.AddChunk(d1, 5, 5);  // The 1st chunk may start at a non-zero offset
    ASSERT_THROW(b.AddChunk(d2, 1, 7), Orthanc::OrthancException); // Can only skip bytes in 1st chunk
    b.AddChunk(d2, 0, 20);
    b.AddChunk(d3, 0, 7);  // The trailing chunk may cover only a prefix of its instance
    ASSERT_THROW(b.AddChunk(d4, 0, 10), Orthanc::OrthancException); // d3 was not complete (only 7 of its 30 bytes)

    ASSERT_EQ(32u, b.GetTotalSize());  // 5 + 20 + 7
    ASSERT_EQ(3u, b.GetChunksCount());

    ASSERT_EQ("d1", b.GetChunkInstanceId(0));
    ASSERT_EQ(5u, b.GetChunkOffset(0));
    ASSERT_EQ(5u, b.GetChunkSize(0));
    ASSERT_EQ("d2", b.GetChunkInstanceId(1));
    ASSERT_EQ(0u, b.GetChunkOffset(1));
    ASSERT_EQ(20u, b.GetChunkSize(1));
    ASSERT_EQ("d3", b.GetChunkInstanceId(2));
    ASSERT_EQ(0u, b.GetChunkOffset(2));
    ASSERT_EQ(7u, b.GetChunkSize(2));

    // The offset of the 1st chunk is reflected in the pull URI
    std::string uri;
    b.ComputePullUri(uri, BucketCompression_None);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=none", uri);
    b.ComputePullUri(uri, BucketCompression_Gzip);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=gzip", uri);

    b.Clear();
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    // After Clear(), a skipping chunk is accepted again (it is the 1st one)
    b.AddChunk(d2, 1, 7);
    ASSERT_EQ(7u, b.GetTotalSize());
    ASSERT_EQ(1u, b.GetChunksCount());
  }
}
133 | |
134 | |
135 TEST(TransferBucket, Serialization) | |
136 { | |
137 using namespace OrthancPlugins; | |
138 | |
139 Json::Value s; | |
140 | |
141 { | |
142 DicomInstanceInfo d1("d1", 10, ""); | |
143 DicomInstanceInfo d2("d2", 20, ""); | |
144 DicomInstanceInfo d3("d3", 30, ""); | |
145 | |
146 TransferBucket b; | |
147 b.AddChunk(d1, 5, 5); | |
148 b.AddChunk(d2, 0, 20); | |
149 b.AddChunk(d3, 0, 7); | |
150 b.Serialize(s); | |
151 } | |
152 | |
153 { | |
154 TransferBucket b(s); | |
155 | |
156 std::string uri; | |
157 b.ComputePullUri(uri, BucketCompression_None); | |
158 ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=none", uri); | |
159 } | |
160 } | |
161 | |
162 | |
163 TEST(TransferScheduler, Empty) | |
164 { | |
165 using namespace OrthancPlugins; | |
166 | |
167 TransferScheduler s; | |
168 ASSERT_EQ(0u, s.GetInstancesCount()); | |
169 ASSERT_EQ(0u, s.GetTotalSize()); | |
170 | |
171 std::vector<DicomInstanceInfo> i; | |
172 s.ListInstances(i); | |
173 ASSERT_TRUE(i.empty()); | |
174 | |
175 std::vector<TransferBucket> b; | |
176 s.ComputePullBuckets(b, 10, 1000, "http://localhost/", BucketCompression_None); | |
177 ASSERT_TRUE(b.empty()); | |
178 | |
179 Json::Value v; | |
180 s.FormatPushTransaction(v, b, 10, 1000, BucketCompression_None); | |
181 ASSERT_TRUE(b.empty()); | |
182 ASSERT_EQ(Json::objectValue, v.type()); | |
183 ASSERT_TRUE(v.isMember("Buckets")); | |
184 ASSERT_TRUE(v.isMember("Compression")); | |
185 ASSERT_TRUE(v.isMember("Instances")); | |
186 ASSERT_EQ(Json::arrayValue, v["Buckets"].type()); | |
187 ASSERT_EQ(Json::stringValue, v["Compression"].type()); | |
188 ASSERT_EQ(Json::arrayValue, v["Instances"].type()); | |
189 ASSERT_EQ(0u, v["Buckets"].size()); | |
190 ASSERT_EQ("none", v["Compression"].asString()); | |
191 ASSERT_EQ(0u, v["Instances"].size()); | |
192 } | |
193 | |
194 | |
195 TEST(TransferScheduler, Basic) | |
196 { | |
197 using namespace OrthancPlugins; | |
198 | |
199 DicomInstanceInfo d1("d1", 10, "md1"); | |
200 DicomInstanceInfo d2("d2", 10, "md2"); | |
201 DicomInstanceInfo d3("d3", 10, "md3"); | |
202 | |
203 TransferScheduler s; | |
204 s.AddInstance(d1); | |
205 s.AddInstance(d2); | |
206 s.AddInstance(d3); | |
207 | |
208 std::vector<DicomInstanceInfo> i; | |
209 s.ListInstances(i); | |
210 ASSERT_EQ(3u, i.size()); | |
211 | |
212 std::vector<TransferBucket> b; | |
213 s.ComputePullBuckets(b, 10, 1000, "http://localhost/", BucketCompression_None); | |
214 ASSERT_EQ(3u, b.size()); | |
215 ASSERT_EQ(1u, b[0].GetChunksCount()); | |
216 ASSERT_EQ("d1", b[0].GetChunkInstanceId(0)); | |
217 ASSERT_EQ(0u, b[0].GetChunkOffset(0)); | |
218 ASSERT_EQ(10u, b[0].GetChunkSize(0)); | |
219 ASSERT_EQ(1u, b[1].GetChunksCount()); | |
220 ASSERT_EQ("d2", b[1].GetChunkInstanceId(0)); | |
221 ASSERT_EQ(0u, b[1].GetChunkOffset(0)); | |
222 ASSERT_EQ(10u, b[1].GetChunkSize(0)); | |
223 ASSERT_EQ(1u, b[2].GetChunksCount()); | |
224 ASSERT_EQ("d3", b[2].GetChunkInstanceId(0)); | |
225 ASSERT_EQ(0u, b[2].GetChunkOffset(0)); | |
226 ASSERT_EQ(10u, b[2].GetChunkSize(0)); | |
227 | |
228 Json::Value v; | |
229 s.FormatPushTransaction(v, b, 10, 1000, BucketCompression_Gzip); | |
230 ASSERT_EQ(3u, b.size()); | |
231 ASSERT_EQ(3u, v["Buckets"].size()); | |
232 ASSERT_EQ("gzip", v["Compression"].asString()); | |
233 ASSERT_EQ(3u, v["Instances"].size()); | |
234 | |
235 for (Json::Value::ArrayIndex i = 0; i < 3; i++) | |
236 { | |
237 TransferBucket b(v["Buckets"][i]); | |
238 ASSERT_EQ(1u, b.GetChunksCount()); | |
239 if (i == 0) | |
240 ASSERT_EQ("d1", b.GetChunkInstanceId(0)); | |
241 else if (i == 1) | |
242 ASSERT_EQ("d2", b.GetChunkInstanceId(0)); | |
243 else | |
244 ASSERT_EQ("d3", b.GetChunkInstanceId(0)); | |
245 | |
246 ASSERT_EQ(0u, b.GetChunkOffset(0)); | |
247 ASSERT_EQ(10u, b.GetChunkSize(0)); | |
248 } | |
249 | |
250 for (Json::Value::ArrayIndex i = 0; i < 3; i++) | |
251 { | |
252 DicomInstanceInfo d(v["Instances"][i]); | |
253 if (i == 0) | |
254 { | |
255 ASSERT_EQ("d1", d.GetId()); | |
256 ASSERT_EQ("md1", d.GetMD5()); | |
257 } | |
258 else if (i == 1) | |
259 { | |
260 ASSERT_EQ("d2", d.GetId()); | |
261 ASSERT_EQ("md2", d.GetMD5()); | |
262 } | |
263 else | |
264 { | |
265 ASSERT_EQ("d3", d.GetId()); | |
266 ASSERT_EQ("md3", d.GetMD5()); | |
267 } | |
268 | |
269 ASSERT_EQ(10u, d.GetSize()); | |
270 } | |
271 } | |
272 | |
273 | |
274 | |
275 TEST(TransferScheduler, Grouping) | |
276 { | |
277 using namespace OrthancPlugins; | |
278 | |
279 DicomInstanceInfo d1("d1", 10, "md1"); | |
280 DicomInstanceInfo d2("d2", 10, "md2"); | |
281 DicomInstanceInfo d3("d3", 10, "md3"); | |
282 | |
283 TransferScheduler s; | |
284 s.AddInstance(d1); | |
285 s.AddInstance(d2); | |
286 s.AddInstance(d3); | |
287 | |
288 { | |
289 std::vector<TransferBucket> b; | |
290 s.ComputePullBuckets(b, 20, 1000, "http://localhost/", BucketCompression_None); | |
291 ASSERT_EQ(2u, b.size()); | |
292 ASSERT_EQ(2u, b[0].GetChunksCount()); | |
293 ASSERT_EQ("d1", b[0].GetChunkInstanceId(0)); | |
294 ASSERT_EQ("d2", b[0].GetChunkInstanceId(1)); | |
295 ASSERT_EQ(1u, b[1].GetChunksCount()); | |
296 ASSERT_EQ("d3", b[1].GetChunkInstanceId(0)); | |
297 } | |
298 | |
299 { | |
300 std::vector<TransferBucket> b; | |
301 s.ComputePullBuckets(b, 21, 1000, "http://localhost/", BucketCompression_None); | |
302 ASSERT_EQ(1u, b.size()); | |
303 ASSERT_EQ(3u, b[0].GetChunksCount()); | |
304 ASSERT_EQ("d1", b[0].GetChunkInstanceId(0)); | |
305 ASSERT_EQ("d2", b[0].GetChunkInstanceId(1)); | |
306 ASSERT_EQ("d3", b[0].GetChunkInstanceId(2)); | |
307 } | |
308 | |
309 { | |
310 std::string longBase(2048, '_'); | |
311 std::vector<TransferBucket> b; | |
312 s.ComputePullBuckets(b, 21, 1000, longBase, BucketCompression_None); | |
313 ASSERT_EQ(3u, b.size()); | |
314 ASSERT_EQ(1u, b[0].GetChunksCount()); | |
315 ASSERT_EQ("d1", b[0].GetChunkInstanceId(0)); | |
316 ASSERT_EQ(1u, b[1].GetChunksCount()); | |
317 ASSERT_EQ("d2", b[1].GetChunkInstanceId(0)); | |
318 ASSERT_EQ(1u, b[2].GetChunksCount()); | |
319 ASSERT_EQ("d3", b[2].GetChunkInstanceId(0)); | |
320 } | |
321 } | |
322 | |
323 | |
324 TEST(TransferScheduler, Splitting) | |
325 { | |
326 using namespace OrthancPlugins; | |
327 | |
328 for (size_t i = 1; i < 20; i++) | |
329 { | |
330 DicomInstanceInfo dicom("dicom", i, ""); | |
331 | |
332 TransferScheduler s; | |
333 s.AddInstance(dicom); | |
334 | |
335 { | |
336 std::vector<TransferBucket> b; | |
337 s.ComputePullBuckets(b, 1, 1000, "http://localhost/", BucketCompression_None); | |
338 ASSERT_EQ(1u, b.size()); | |
339 ASSERT_EQ(1u, b[0].GetChunksCount()); | |
340 ASSERT_EQ("dicom", b[0].GetChunkInstanceId(0)); | |
341 ASSERT_EQ(0u, b[0].GetChunkOffset(0)); | |
342 ASSERT_EQ(i, b[0].GetChunkSize(0)); | |
343 } | |
344 | |
345 for (size_t split = 1; split < 20; split++) | |
346 { | |
347 size_t count; | |
348 if (dicom.GetSize() % split != 0) | |
349 count = dicom.GetSize() / split + 1; | |
350 else | |
351 count = dicom.GetSize() / split; | |
352 | |
353 std::vector<TransferBucket> b; | |
354 s.ComputePullBuckets(b, 1, split, "http://localhost/", BucketCompression_None); | |
355 ASSERT_EQ(count, b.size()); | |
356 | |
357 size_t size = dicom.GetSize() / count; | |
358 size_t offset = 0; | |
359 for (size_t j = 0; j < count; j++) | |
360 { | |
361 ASSERT_EQ(1u, b[j].GetChunksCount()); | |
362 ASSERT_EQ("dicom", b[j].GetChunkInstanceId(0)); | |
363 ASSERT_EQ(offset, b[j].GetChunkOffset(0)); | |
364 if (j + 1 != count) | |
365 ASSERT_EQ(size, b[j].GetChunkSize(0)); | |
366 else | |
367 ASSERT_EQ(dicom.GetSize() - (count - 1) * size, b[j].GetChunkSize(0)); | |
368 offset += b[j].GetChunkSize(0); | |
369 } | |
370 } | |
371 } | |
372 } | |
373 | |
374 | |
TEST(DownloadArea, Basic)
{
  using namespace OrthancPlugins;

  // Two "instances" whose bodies are plain strings
  std::string s1 = "Hello";          // 5 bytes
  std::string s2 = "Hello, World!";  // 13 bytes

  std::string md1, md2;
  Orthanc::Toolbox::ComputeMD5(md1, s1);
  Orthanc::Toolbox::ComputeMD5(md2, s2);

  std::vector<DicomInstanceInfo> instances;
  instances.push_back(DicomInstanceInfo("d1", s1.size(), md1));
  instances.push_back(DicomInstanceInfo("d2", s2.size(), md2));

  {
    // First scenario: write each instance in one shot
    DownloadArea area(instances);
    ASSERT_EQ(s1.size() + s2.size(), area.GetTotalSize());
    ASSERT_THROW(area.CheckMD5(), Orthanc::OrthancException);  // Nothing written yet

    area.WriteInstance("d1", s1.c_str(), s1.size());
    area.WriteInstance("d2", s2.c_str(), s2.size());

    area.CheckMD5();  // Both instances complete => must not throw
  }

  {
    // Second scenario: write through buckets of chunks, possibly compressed
    DownloadArea area(instances);
    ASSERT_THROW(area.CheckMD5(), Orthanc::OrthancException);

    {
      // Bucket 1: the first 2 bytes of d1
      TransferBucket b;
      b.AddChunk(instances[0] /*d1*/, 0, 2);
      area.WriteBucket(b, s1.c_str(), 2, BucketCompression_None);
    }

    {
      // Bucket 2: spans the last 3 bytes of d1 and the first 4 of d2
      TransferBucket b;
      b.AddChunk(instances[0] /*d1*/, 2, 3);
      b.AddChunk(instances[1] /*d2*/, 0, 4);
      std::string s = s1.substr(2, 3) + s2.substr(0, 4);
      area.WriteBucket(b, s.c_str(), s.size(), BucketCompression_None);
    }

    {
      // Bucket 3: the remaining 9 bytes of d2, sent gzip-compressed
      TransferBucket b;
      b.AddChunk(instances[1] /*d2*/, 4, 9);
      std::string s = s2.substr(4);
      std::string t;
      Orthanc::GzipCompressor compressor;
      compressor.Compress(t, s.c_str(), s.size());
      area.WriteBucket(b, t.c_str(), t.size(), BucketCompression_Gzip);
    }

    area.CheckMD5();  // Both instances fully reassembled from the buckets
  }
}
432 | |
433 | |
434 | |
435 int main(int argc, char **argv) | |
436 { | |
437 ::testing::InitGoogleTest(&argc, argv); | |
438 Orthanc::Logging::Initialize(); | |
439 Orthanc::Logging::EnableInfoLevel(true); | |
440 Orthanc::Logging::EnableTraceLevel(true); | |
441 | |
442 int result = RUN_ALL_TESTS(); | |
443 | |
444 Orthanc::Logging::Finalize(); | |
445 | |
446 return result; | |
447 } |