comparison UnitTestsSources/ServerIndexTests.cpp @ 3174:8ea7c4546c3a

primitives to collect metrics in Orthanc
author Sebastien Jodogne <s.jodogne@gmail.com>
date Tue, 29 Jan 2019 15:15:48 +0100
parents f86ebf971a72
children fca730c267d7
comparison 3173:096f4a29f223 vs 3174:8ea7c4546c3a
@@ -700,14 +700,16 @@
 context.SetupJobsEngine(true, false);
 ServerIndex& index = context.GetIndex();
 
 index.SetMaximumStorageSize(10);
 
-Json::Value tmp;
-index.ComputeStatistics(tmp);
-ASSERT_EQ(0, tmp["CountPatients"].asInt());
-ASSERT_EQ(0, boost::lexical_cast<int>(tmp["TotalDiskSize"].asString()));
+uint64_t diskSize, uncompressedSize, countPatients, countStudies, countSeries, countInstances;
+index.GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                          countStudies, countSeries, countInstances);
+
+ASSERT_EQ(0u, countPatients);
+ASSERT_EQ(0u, diskSize);
 
 ServerIndex::Attachments attachments;
 
 std::vector<std::string> ids;
 for (int i = 0; i < 10; i++)
@@ -745,21 +747,23 @@
 ASSERT_EQ(hasher.HashStudy(), toStore.GetHasher().HashStudy());
 ASSERT_EQ(hasher.HashSeries(), toStore.GetHasher().HashSeries());
 ASSERT_EQ(hasher.HashInstance(), toStore.GetHasher().HashInstance());
 }
 
-index.ComputeStatistics(tmp);
-ASSERT_EQ(10, tmp["CountPatients"].asInt());
-ASSERT_EQ(0, boost::lexical_cast<int>(tmp["TotalDiskSize"].asString()));
+index.GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                          countStudies, countSeries, countInstances);
+ASSERT_EQ(10u, countPatients);
+ASSERT_EQ(0u, diskSize);
 
 for (size_t i = 0; i < ids.size(); i++)
 {
 FileInfo info(Toolbox::GenerateUuid(), FileContentType_Dicom, 1, "md5");
 index.AddAttachment(info, ids[i]);
 
-index.ComputeStatistics(tmp);
-ASSERT_GE(10, boost::lexical_cast<int>(tmp["TotalDiskSize"].asString()));
+index.GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                          countStudies, countSeries, countInstances);
+ASSERT_GE(10u, diskSize);
 }
 
 // Because the DB is in memory, the SQLite index must not have been created
 ASSERT_FALSE(SystemToolbox::IsRegularFile(path + "/index"));
 
@@ -798,14 +802,16 @@
 
 DicomInstanceHasher hasher(instance);
 std::string id = hasher.HashInstance();
 context.GetIndex().SetOverwriteInstances(overwrite);
 
-Json::Value tmp;
-context.GetIndex().ComputeStatistics(tmp);
-ASSERT_EQ(0, tmp["CountInstances"].asInt());
-ASSERT_EQ(0, boost::lexical_cast<int>(tmp["TotalDiskSize"].asString()));
+uint64_t diskSize, uncompressedSize, countPatients, countStudies, countSeries, countInstances;
+context.GetIndex().GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                                       countStudies, countSeries, countInstances);
+
+ASSERT_EQ(0, countInstances);
+ASSERT_EQ(0, diskSize);
 
 {
 DicomInstanceToStore toStore;
 toStore.SetSummary(instance);
 toStore.SetOrigin(DicomInstanceOrigin::FromPlugins());
@@ -818,17 +824,17 @@
 
 FileInfo dicom1, json1;
 ASSERT_TRUE(context.GetIndex().LookupAttachment(dicom1, id, FileContentType_Dicom));
 ASSERT_TRUE(context.GetIndex().LookupAttachment(json1, id, FileContentType_DicomAsJson));
 
-context.GetIndex().ComputeStatistics(tmp);
-ASSERT_EQ(1, tmp["CountInstances"].asInt());
-ASSERT_EQ(dicom1.GetCompressedSize() + json1.GetCompressedSize(),
-          boost::lexical_cast<size_t>(tmp["TotalDiskSize"].asString()));
-ASSERT_EQ(dicom1.GetUncompressedSize() + json1.GetUncompressedSize(),
-          boost::lexical_cast<size_t>(tmp["TotalUncompressedSize"].asString()));
-
+context.GetIndex().GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                                       countStudies, countSeries, countInstances);
+ASSERT_EQ(1u, countInstances);
+ASSERT_EQ(dicom1.GetCompressedSize() + json1.GetCompressedSize(), diskSize);
+ASSERT_EQ(dicom1.GetUncompressedSize() + json1.GetUncompressedSize(), uncompressedSize);
+
+Json::Value tmp;
 context.ReadDicomAsJson(tmp, id);
 ASSERT_EQ("name", tmp["0010,0010"]["Value"].asString());
 
 {
 ServerContext::DicomCacheLocker locker(context, id);
@@ -853,16 +859,15 @@
 
 FileInfo dicom2, json2;
 ASSERT_TRUE(context.GetIndex().LookupAttachment(dicom2, id, FileContentType_Dicom));
 ASSERT_TRUE(context.GetIndex().LookupAttachment(json2, id, FileContentType_DicomAsJson));
 
-context.GetIndex().ComputeStatistics(tmp);
-ASSERT_EQ(1, tmp["CountInstances"].asInt());
-ASSERT_EQ(dicom2.GetCompressedSize() + json2.GetCompressedSize(),
-          boost::lexical_cast<size_t>(tmp["TotalDiskSize"].asString()));
-ASSERT_EQ(dicom2.GetUncompressedSize() + json2.GetUncompressedSize(),
-          boost::lexical_cast<size_t>(tmp["TotalUncompressedSize"].asString()));
+context.GetIndex().GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
+                                       countStudies, countSeries, countInstances);
+ASSERT_EQ(1, countInstances);
+ASSERT_EQ(dicom2.GetCompressedSize() + json2.GetCompressedSize(), diskSize);
+ASSERT_EQ(dicom2.GetUncompressedSize() + json2.GetUncompressedSize(), uncompressedSize);
 
 if (overwrite)
 {
 ASSERT_NE(dicom1.GetUuid(), dicom2.GetUuid());
 ASSERT_NE(json1.GetUuid(), json2.GetUuid());
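
The changeset swaps the JSON-returning ComputeStatistics(Json::Value&) for a GetGlobalStatistics() primitive that fills six uint64_t output parameters, as the updated call sites above show. The sketch below is a minimal, self-contained illustration of that out-parameter pattern; the FakeIndex class and its stored counters are hypothetical stand-ins inferred from the call sites, not Orthanc code, and the exact Orthanc declaration may differ.

#include <cstdint>
#include <iostream>

// Hypothetical stand-in mirroring the out-parameter style used by the
// GetGlobalStatistics() calls in the test above (not Orthanc code).
class FakeIndex
{
private:
  uint64_t diskSize_;
  uint64_t uncompressedSize_;
  uint64_t countPatients_;
  uint64_t countStudies_;
  uint64_t countSeries_;
  uint64_t countInstances_;

public:
  FakeIndex() :
    diskSize_(0), uncompressedSize_(0), countPatients_(0),
    countStudies_(0), countSeries_(0), countInstances_(0)
  {
  }

  // All six counters are returned at once through references, so one call
  // replaces the repeated JSON field lookups done with ComputeStatistics().
  void GetGlobalStatistics(uint64_t& diskSize,
                           uint64_t& uncompressedSize,
                           uint64_t& countPatients,
                           uint64_t& countStudies,
                           uint64_t& countSeries,
                           uint64_t& countInstances) const
  {
    diskSize = diskSize_;
    uncompressedSize = uncompressedSize_;
    countPatients = countPatients_;
    countStudies = countStudies_;
    countSeries = countSeries_;
    countInstances = countInstances_;
  }
};

int main()
{
  FakeIndex index;

  uint64_t diskSize, uncompressedSize, countPatients,
           countStudies, countSeries, countInstances;
  index.GetGlobalStatistics(diskSize, uncompressedSize, countPatients,
                            countStudies, countSeries, countInstances);

  std::cout << "Patients: " << countPatients
            << ", disk size: " << diskSize << std::endl;
  return 0;
}

Returning native integers instead of a Json::Value also removes the boost::lexical_cast round-trips through strings that the old assertions needed, which fits the changeset's goal of cheap metric collection.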