/**
 * Transfers accelerator plugin for Orthanc
 * Copyright (C) 2018 Osimis, Belgium
 *
 * This program is free software: you can redistribute it and/or
 * modify it under the terms of the GNU Affero General Public License
 * as published by the Free Software Foundation, either version 3 of
 * the License, or (at your option) any later version.
 *
 * This program is distributed in the hope that it will be useful, but
 * WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
 * Affero General Public License for more details.
 *
 * You should have received a copy of the GNU Affero General Public License
 * along with this program. If not, see <http://www.gnu.org/licenses/>.
 **/


#include "../Framework/DownloadArea.h"

#include <Core/Compression/GzipCompressor.h>
#include <Core/Logging.h>
#include <Core/OrthancException.h>
#include <gtest/gtest.h>


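// Round-trip check of the bucket compression enumeration: a value converted
// to its string form and parsed back should compare equal, while a string
// with unexpected casing such as "None" should be rejected with an exception.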
TEST(Toolbox, Enumerations)
{
  using namespace OrthancPlugins;
  ASSERT_EQ(BucketCompression_None, StringToBucketCompression(EnumerationToString(BucketCompression_None)));
  ASSERT_EQ(BucketCompression_Gzip, StringToBucketCompression(EnumerationToString(BucketCompression_Gzip)));
  ASSERT_THROW(StringToBucketCompression("None"), Orthanc::OrthancException);
}


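// Unit-conversion helpers: judging from the expected values, the helpers
// appear to round to the nearest kilobyte/megabyte (1000 bytes -> 1 KB,
// 500 bytes -> 0 KB) rather than truncate.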
TEST(Toolbox, Conversions)
{
  ASSERT_EQ(2u, OrthancPlugins::ConvertToKilobytes(2048));
  ASSERT_EQ(1u, OrthancPlugins::ConvertToKilobytes(1000));
  ASSERT_EQ(0u, OrthancPlugins::ConvertToKilobytes(500));

  ASSERT_EQ(2u, OrthancPlugins::ConvertToMegabytes(2048 * 1024));
  ASSERT_EQ(1u, OrthancPlugins::ConvertToMegabytes(1000 * 1024));
  ASSERT_EQ(0u, OrthancPlugins::ConvertToMegabytes(500 * 1024));
}


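// Basic TransferBucket behaviour: chunks are appended contiguously, invalid
// additions (a chunk overflowing its instance, or a non-leading chunk that
// skips bytes) raise OrthancException, and the pull URI encodes the chunked
// instance identifiers together with the offset, total size and compression.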
TEST(TransferBucket, Basic)
{
  using namespace OrthancPlugins;

  DicomInstanceInfo d1("d1", 10, "");
  DicomInstanceInfo d2("d2", 20, "");
  DicomInstanceInfo d3("d3", 30, "");
  DicomInstanceInfo d4("d4", 40, "");

  {
    TransferBucket b;
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    b.AddChunk(d1, 0, 10);
    b.AddChunk(d2, 0, 20);
    ASSERT_THROW(b.AddChunk(d3, 0, 31), Orthanc::OrthancException);
    ASSERT_THROW(b.AddChunk(d3, 1, 30), Orthanc::OrthancException);
    b.AddChunk(d3, 0, 30);

    ASSERT_EQ(60u, b.GetTotalSize());
    ASSERT_EQ(3u, b.GetChunksCount());

    ASSERT_EQ("d1", b.GetChunkInstanceId(0));
    ASSERT_EQ(0u, b.GetChunkOffset(0));
    ASSERT_EQ(10u, b.GetChunkSize(0));
    ASSERT_EQ("d2", b.GetChunkInstanceId(1));
    ASSERT_EQ(0u, b.GetChunkOffset(1));
    ASSERT_EQ(20u, b.GetChunkSize(1));
    ASSERT_EQ("d3", b.GetChunkInstanceId(2));
    ASSERT_EQ(0u, b.GetChunkOffset(2));
    ASSERT_EQ(30u, b.GetChunkSize(2));

    std::string uri;
    b.ComputePullUri(uri, BucketCompression_None);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=0&size=60&compression=none", uri);
    b.ComputePullUri(uri, BucketCompression_Gzip);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=0&size=60&compression=gzip", uri);

    b.Clear();
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    ASSERT_THROW(b.ComputePullUri(uri, BucketCompression_None), Orthanc::OrthancException);  // Empty
  }

  {
    TransferBucket b;
    b.AddChunk(d1, 5, 5);
    ASSERT_THROW(b.AddChunk(d2, 1, 7), Orthanc::OrthancException);  // Can only skip bytes in 1st chunk
    b.AddChunk(d2, 0, 20);
    b.AddChunk(d3, 0, 7);
    ASSERT_THROW(b.AddChunk(d4, 0, 10), Orthanc::OrthancException);  // d2 was not complete

    ASSERT_EQ(32u, b.GetTotalSize());
    ASSERT_EQ(3u, b.GetChunksCount());

    ASSERT_EQ("d1", b.GetChunkInstanceId(0));
    ASSERT_EQ(5u, b.GetChunkOffset(0));
    ASSERT_EQ(5u, b.GetChunkSize(0));
    ASSERT_EQ("d2", b.GetChunkInstanceId(1));
    ASSERT_EQ(0u, b.GetChunkOffset(1));
    ASSERT_EQ(20u, b.GetChunkSize(1));
    ASSERT_EQ("d3", b.GetChunkInstanceId(2));
    ASSERT_EQ(0u, b.GetChunkOffset(2));
    ASSERT_EQ(7u, b.GetChunkSize(2));

    std::string uri;
    b.ComputePullUri(uri, BucketCompression_None);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=none", uri);
    b.ComputePullUri(uri, BucketCompression_Gzip);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=gzip", uri);

    b.Clear();
    ASSERT_EQ(0u, b.GetTotalSize());
    ASSERT_EQ(0u, b.GetChunksCount());

    b.AddChunk(d2, 1, 7);
    ASSERT_EQ(7u, b.GetTotalSize());
    ASSERT_EQ(1u, b.GetChunksCount());
  }
}


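// A TransferBucket serialized to JSON and rebuilt from that JSON is expected
// to yield exactly the same pull URI as the original bucket.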
TEST(TransferBucket, Serialization)
{
  using namespace OrthancPlugins;

  Json::Value s;

  {
    DicomInstanceInfo d1("d1", 10, "");
    DicomInstanceInfo d2("d2", 20, "");
    DicomInstanceInfo d3("d3", 30, "");

    TransferBucket b;
    b.AddChunk(d1, 5, 5);
    b.AddChunk(d2, 0, 20);
    b.AddChunk(d3, 0, 7);
    b.Serialize(s);
  }

  {
    TransferBucket b(s);

    std::string uri;
    b.ComputePullUri(uri, BucketCompression_None);
    ASSERT_EQ("/transfers/chunks/d1.d2.d3?offset=5&size=32&compression=none", uri);
  }
}


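// An empty TransferScheduler: no instances, no pull buckets, and a push
// transaction whose "Buckets" and "Instances" arrays are empty while the
// "Compression" field still reflects the requested scheme.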
TEST(TransferScheduler, Empty)
{
  using namespace OrthancPlugins;

  TransferScheduler s;
  ASSERT_EQ(0u, s.GetInstancesCount());
  ASSERT_EQ(0u, s.GetTotalSize());

  std::vector<DicomInstanceInfo> i;
  s.ListInstances(i);
  ASSERT_TRUE(i.empty());

  std::vector<TransferBucket> b;
  s.ComputePullBuckets(b, 10, 1000, "http://localhost/", BucketCompression_None);
  ASSERT_TRUE(b.empty());

  Json::Value v;
  s.FormatPushTransaction(v, b, 10, 1000, BucketCompression_None);
  ASSERT_TRUE(b.empty());
  ASSERT_EQ(Json::objectValue, v.type());
  ASSERT_TRUE(v.isMember("Buckets"));
  ASSERT_TRUE(v.isMember("Compression"));
  ASSERT_TRUE(v.isMember("Instances"));
  ASSERT_EQ(Json::arrayValue, v["Buckets"].type());
  ASSERT_EQ(Json::stringValue, v["Compression"].type());
  ASSERT_EQ(Json::arrayValue, v["Instances"].type());
  ASSERT_EQ(0u, v["Buckets"].size());
  ASSERT_EQ("none", v["Compression"].asString());
  ASSERT_EQ(0u, v["Instances"].size());
}


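// Three 10-byte instances: ComputePullBuckets is expected to produce one
// single-chunk bucket per instance here, and the formatted push transaction
// should carry the same buckets plus the instance identifiers, sizes and
// MD5 checksums.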
TEST(TransferScheduler, Basic)
{
  using namespace OrthancPlugins;

  DicomInstanceInfo d1("d1", 10, "md1");
  DicomInstanceInfo d2("d2", 10, "md2");
  DicomInstanceInfo d3("d3", 10, "md3");

  TransferScheduler s;
  s.AddInstance(d1);
  s.AddInstance(d2);
  s.AddInstance(d3);

  std::vector<DicomInstanceInfo> i;
  s.ListInstances(i);
  ASSERT_EQ(3u, i.size());

  std::vector<TransferBucket> b;
  s.ComputePullBuckets(b, 10, 1000, "http://localhost/", BucketCompression_None);
  ASSERT_EQ(3u, b.size());
  ASSERT_EQ(1u, b[0].GetChunksCount());
  ASSERT_EQ("d1", b[0].GetChunkInstanceId(0));
  ASSERT_EQ(0u, b[0].GetChunkOffset(0));
  ASSERT_EQ(10u, b[0].GetChunkSize(0));
  ASSERT_EQ(1u, b[1].GetChunksCount());
  ASSERT_EQ("d2", b[1].GetChunkInstanceId(0));
  ASSERT_EQ(0u, b[1].GetChunkOffset(0));
  ASSERT_EQ(10u, b[1].GetChunkSize(0));
  ASSERT_EQ(1u, b[2].GetChunksCount());
  ASSERT_EQ("d3", b[2].GetChunkInstanceId(0));
  ASSERT_EQ(0u, b[2].GetChunkOffset(0));
  ASSERT_EQ(10u, b[2].GetChunkSize(0));

  Json::Value v;
  s.FormatPushTransaction(v, b, 10, 1000, BucketCompression_Gzip);
  ASSERT_EQ(3u, b.size());
  ASSERT_EQ(3u, v["Buckets"].size());
  ASSERT_EQ("gzip", v["Compression"].asString());
  ASSERT_EQ(3u, v["Instances"].size());

  for (Json::Value::ArrayIndex i = 0; i < 3; i++)
  {
    TransferBucket b(v["Buckets"][i]);
    ASSERT_EQ(1u, b.GetChunksCount());
    if (i == 0)
      ASSERT_EQ("d1", b.GetChunkInstanceId(0));
    else if (i == 1)
      ASSERT_EQ("d2", b.GetChunkInstanceId(0));
    else
      ASSERT_EQ("d3", b.GetChunkInstanceId(0));

    ASSERT_EQ(0u, b.GetChunkOffset(0));
    ASSERT_EQ(10u, b.GetChunkSize(0));
  }

  for (Json::Value::ArrayIndex i = 0; i < 3; i++)
  {
    DicomInstanceInfo d(v["Instances"][i]);
    if (i == 0)
    {
      ASSERT_EQ("d1", d.GetId());
      ASSERT_EQ("md1", d.GetMD5());
    }
    else if (i == 1)
    {
      ASSERT_EQ("d2", d.GetId());
      ASSERT_EQ("md2", d.GetMD5());
    }
    else
    {
      ASSERT_EQ("d3", d.GetId());
      ASSERT_EQ("md3", d.GetMD5());
    }

    ASSERT_EQ(10u, d.GetSize());
  }
}


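// Grouping: with a larger size threshold, several small instances are packed
// into one pull bucket; a very long base URL seemingly falls back to one
// instance per bucket, presumably to keep the pull URI length manageable.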
TEST(TransferScheduler, Grouping)
{
  using namespace OrthancPlugins;

  DicomInstanceInfo d1("d1", 10, "md1");
  DicomInstanceInfo d2("d2", 10, "md2");
  DicomInstanceInfo d3("d3", 10, "md3");

  TransferScheduler s;
  s.AddInstance(d1);
  s.AddInstance(d2);
  s.AddInstance(d3);

  {
    std::vector<TransferBucket> b;
    s.ComputePullBuckets(b, 20, 1000, "http://localhost/", BucketCompression_None);
    ASSERT_EQ(2u, b.size());
    ASSERT_EQ(2u, b[0].GetChunksCount());
    ASSERT_EQ("d1", b[0].GetChunkInstanceId(0));
    ASSERT_EQ("d2", b[0].GetChunkInstanceId(1));
    ASSERT_EQ(1u, b[1].GetChunksCount());
    ASSERT_EQ("d3", b[1].GetChunkInstanceId(0));
  }

  {
    std::vector<TransferBucket> b;
    s.ComputePullBuckets(b, 21, 1000, "http://localhost/", BucketCompression_None);
    ASSERT_EQ(1u, b.size());
    ASSERT_EQ(3u, b[0].GetChunksCount());
    ASSERT_EQ("d1", b[0].GetChunkInstanceId(0));
    ASSERT_EQ("d2", b[0].GetChunkInstanceId(1));
    ASSERT_EQ("d3", b[0].GetChunkInstanceId(2));
  }

  {
    std::string longBase(2048, '_');
    std::vector<TransferBucket> b;
    s.ComputePullBuckets(b, 21, 1000, longBase, BucketCompression_None);
    ASSERT_EQ(3u, b.size());
    ASSERT_EQ(1u, b[0].GetChunksCount());
    ASSERT_EQ("d1", b[0].GetChunkInstanceId(0));
    ASSERT_EQ(1u, b[1].GetChunksCount());
    ASSERT_EQ("d2", b[1].GetChunkInstanceId(0));
    ASSERT_EQ(1u, b[2].GetChunksCount());
    ASSERT_EQ("d3", b[2].GetChunkInstanceId(0));
  }
}


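// Splitting: an instance larger than the allowed bucket size is cut into
// ceil(size / limit) single-chunk buckets covering consecutive byte ranges,
// with the last bucket holding the remainder.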
TEST(TransferScheduler, Splitting)
{
  using namespace OrthancPlugins;

  for (size_t i = 1; i < 20; i++)
  {
    DicomInstanceInfo dicom("dicom", i, "");

    TransferScheduler s;
    s.AddInstance(dicom);

    {
      std::vector<TransferBucket> b;
      s.ComputePullBuckets(b, 1, 1000, "http://localhost/", BucketCompression_None);
      ASSERT_EQ(1u, b.size());
      ASSERT_EQ(1u, b[0].GetChunksCount());
      ASSERT_EQ("dicom", b[0].GetChunkInstanceId(0));
      ASSERT_EQ(0u, b[0].GetChunkOffset(0));
      ASSERT_EQ(i, b[0].GetChunkSize(0));
    }

    for (size_t split = 1; split < 20; split++)
    {
      size_t count;
      if (dicom.GetSize() % split != 0)
        count = dicom.GetSize() / split + 1;
      else
        count = dicom.GetSize() / split;

      std::vector<TransferBucket> b;
      s.ComputePullBuckets(b, 1, split, "http://localhost/", BucketCompression_None);
      ASSERT_EQ(count, b.size());

      size_t size = dicom.GetSize() / count;
      size_t offset = 0;
      for (size_t j = 0; j < count; j++)
      {
        ASSERT_EQ(1u, b[j].GetChunksCount());
        ASSERT_EQ("dicom", b[j].GetChunkInstanceId(0));
        ASSERT_EQ(offset, b[j].GetChunkOffset(0));
        if (j + 1 != count)
          ASSERT_EQ(size, b[j].GetChunkSize(0));
        else
          ASSERT_EQ(dicom.GetSize() - (count - 1) * size, b[j].GetChunkSize(0));
        offset += b[j].GetChunkSize(0);
      }
    }
  }
}


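// DownloadArea reassembles instances either from whole-instance writes or
// from bucket writes (optionally gzip-compressed, and possibly spanning two
// instances); CheckMD5() throws while data is missing and succeeds once all
// bytes have been written.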
TEST(DownloadArea, Basic)
{
  using namespace OrthancPlugins;

  std::string s1 = "Hello";
  std::string s2 = "Hello, World!";

  std::string md1, md2;
  Orthanc::Toolbox::ComputeMD5(md1, s1);
  Orthanc::Toolbox::ComputeMD5(md2, s2);

  std::vector<DicomInstanceInfo> instances;
  instances.push_back(DicomInstanceInfo("d1", s1.size(), md1));
  instances.push_back(DicomInstanceInfo("d2", s2.size(), md2));

  {
    DownloadArea area(instances);
    ASSERT_EQ(s1.size() + s2.size(), area.GetTotalSize());
    ASSERT_THROW(area.CheckMD5(), Orthanc::OrthancException);

    area.WriteInstance("d1", s1.c_str(), s1.size());
    area.WriteInstance("d2", s2.c_str(), s2.size());

    area.CheckMD5();
  }

  {
    DownloadArea area(instances);
    ASSERT_THROW(area.CheckMD5(), Orthanc::OrthancException);

    {
      TransferBucket b;
      b.AddChunk(instances[0] /*d1*/, 0, 2);
      area.WriteBucket(b, s1.c_str(), 2, BucketCompression_None);
    }

    {
      TransferBucket b;
      b.AddChunk(instances[0] /*d1*/, 2, 3);
      b.AddChunk(instances[1] /*d2*/, 0, 4);
      std::string s = s1.substr(2, 3) + s2.substr(0, 4);
      area.WriteBucket(b, s.c_str(), s.size(), BucketCompression_None);
    }

    {
      TransferBucket b;
      b.AddChunk(instances[1] /*d2*/, 4, 9);
      std::string s = s2.substr(4);
      std::string t;
      Orthanc::GzipCompressor compressor;
      compressor.Compress(t, s.c_str(), s.size());
      area.WriteBucket(b, t.c_str(), t.size(), BucketCompression_Gzip);
    }

    area.CheckMD5();
  }
}


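// Standard Google Test entry point; info- and trace-level logging is enabled
// so the code under test produces verbose output while the tests run.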
int main(int argc, char **argv)
{
  ::testing::InitGoogleTest(&argc, argv);
  Orthanc::Logging::Initialize();
  Orthanc::Logging::EnableInfoLevel(true);
  Orthanc::Logging::EnableTraceLevel(true);

  int result = RUN_ALL_TESTS();

  Orthanc::Logging::Finalize();

  return result;
}