changeset 202:1650557bd81a
refactoring
author:   Sebastien Jodogne <s.jodogne@gmail.com>
date:     Tue, 27 Nov 2012 17:48:37 +0100
parents:  bee20e978835
children: 9283552c25df
files:    OrthancServer/DatabaseWrapper.h OrthancServer/PrepareDatabase-v1.sql OrthancServer/PrepareDatabase.sql OrthancServer/ServerIndex.cpp OrthancServer/ServerIndex.h
diffstat: 5 files changed, 308 insertions(+), 645 deletions(-)
--- a/OrthancServer/DatabaseWrapper.h	Tue Nov 27 17:36:19 2012 +0100
+++ b/OrthancServer/DatabaseWrapper.h	Tue Nov 27 17:48:37 2012 +0100
@@ -181,5 +181,10 @@
     {
       return new SQLite::Transaction(db_);
     }
+
+    const char* GetErrorMessage() const
+    {
+      return db_.GetErrorMessage();
+    }
   };
 }
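Note on the new accessor: DatabaseWrapper::GetErrorMessage() simply forwards the error string of the underlying SQLite::Connection so that higher layers can log why a transaction failed. A minimal sketch of the intended call site, modelled on the catch block that this changeset adds to ServerIndex::Store further down (only calls visible in this diff are used; the surrounding fragment is illustrative, not actual Orthanc code):

    // Hedged sketch: "db_" is assumed to be the std::auto_ptr<DatabaseWrapper>
    // member introduced in ServerIndex.h below.
    try
    {
      std::auto_ptr<SQLite::Transaction> t(db_->StartTransaction());
      t->Begin();
      // ... update the index through db_ ...
      t->Commit();
    }
    catch (OrthancException& e)
    {
      // GetErrorMessage() exposes the SQLite error string for diagnostics.
      LOG(ERROR) << "EXCEPTION [" << e.What() << "] " << db_->GetErrorMessage();
    }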
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/OrthancServer/PrepareDatabase-v1.sql	Tue Nov 27 17:48:37 2012 +0100
@@ -0,0 +1,136 @@
+CREATE TABLE GlobalProperties(
+       name TEXT PRIMARY KEY,
+       value TEXT
+       );
+
+CREATE TABLE Resources(
+       uuid TEXT PRIMARY KEY,
+       resourceType INTEGER
+       );
+
+CREATE TABLE Patients(
+       uuid TEXT PRIMARY KEY,
+       dicomPatientId TEXT
+       );
+
+CREATE TABLE Studies(
+       uuid TEXT PRIMARY KEY,
+       parentPatient TEXT REFERENCES Patients(uuid) ON DELETE CASCADE,
+       dicomStudy TEXT
+       );
+
+CREATE TABLE Series(
+       uuid TEXT PRIMARY KEY,
+       parentStudy TEXT REFERENCES Studies(uuid) ON DELETE CASCADE,
+       dicomSeries TEXT,
+       expectedNumberOfInstances INTEGER
+       );
+
+CREATE TABLE Instances(
+       uuid TEXT PRIMARY KEY,
+       parentSeries TEXT REFERENCES Series(uuid) ON DELETE CASCADE,
+       dicomInstance TEXT,
+       fileUuid TEXT,
+       fileSize INTEGER,
+       jsonUuid TEXT,
+       distantAet TEXT,
+       indexInSeries INTEGER
+       );
+
+CREATE TABLE MainDicomTags(
+       uuid TEXT,
+       tagGroup INTEGER,
+       tagElement INTEGER,
+       value TEXT,
+       PRIMARY KEY(uuid, tagGroup, tagElement)
+       );
+
+CREATE TABLE Changes(
+       seq INTEGER PRIMARY KEY AUTOINCREMENT,
+       basePath TEXT,
+       uuid TEXT
+       );
+
+
+CREATE INDEX PatientToStudies ON Studies(parentPatient);
+CREATE INDEX StudyToSeries ON Series(parentStudy);
+CREATE INDEX SeriesToInstances ON Instances(parentSeries);
+
+CREATE INDEX DicomPatientIndex ON Patients(dicomPatientId);
+CREATE INDEX DicomStudyIndex ON Studies(dicomStudy);
+CREATE INDEX DicomSeriesIndex ON Series(dicomSeries);
+CREATE INDEX DicomInstanceIndex ON Instances(dicomInstance);
+
+CREATE INDEX MainDicomTagsIndex ON MainDicomTags(uuid);
+CREATE INDEX MainDicomTagsGroupElement ON MainDicomTags(tagGroup, tagElement);
+CREATE INDEX MainDicomTagsValues ON MainDicomTags(value COLLATE BINARY);
+
+CREATE INDEX ChangesIndex ON Changes(uuid);
+
+CREATE TRIGGER InstanceRemoved
+AFTER DELETE ON Instances
+FOR EACH ROW BEGIN
+  DELETE FROM Resources WHERE uuid = old.uuid;
+  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
+  DELETE FROM Changes WHERE uuid = old.uuid;
+  SELECT DeleteFromFileStorage(old.fileUuid);
+  SELECT DeleteFromFileStorage(old.jsonUuid);
+  SELECT SignalDeletedLevel(3, old.parentSeries);
+END;
+
+CREATE TRIGGER SeriesRemoved
+AFTER DELETE ON Series
+FOR EACH ROW BEGIN
+  DELETE FROM Resources WHERE uuid = old.uuid;
+  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
+  DELETE FROM Changes WHERE uuid = old.uuid;
+  SELECT SignalDeletedLevel(2, old.parentStudy);
+END;
+
+CREATE TRIGGER StudyRemoved
+AFTER DELETE ON Studies
+FOR EACH ROW BEGIN
+  DELETE FROM Resources WHERE uuid = old.uuid;
+  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
+  DELETE FROM Changes WHERE uuid = old.uuid;
+  SELECT SignalDeletedLevel(1, old.parentPatient);
+END;
+
+CREATE TRIGGER PatientRemoved
+AFTER DELETE ON Patients
+FOR EACH ROW BEGIN
+  DELETE FROM Resources WHERE uuid = old.uuid;
+  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
+  DELETE FROM Changes WHERE uuid = old.uuid;
+  SELECT SignalDeletedLevel(0, "");
+END;
+
+
+
+
+CREATE TRIGGER InstanceRemovedUpwardCleaning
+AFTER DELETE ON Instances
+FOR EACH ROW
+  WHEN (SELECT COUNT(*) FROM Instances WHERE parentSeries = old.parentSeries) = 0
+  BEGIN
+    SELECT DeleteFromFileStorage("deleting parent series"); -- TODO REMOVE THIS
+    DELETE FROM Series WHERE uuid = old.parentSeries;
+  END;
+
+CREATE TRIGGER SeriesRemovedUpwardCleaning
+AFTER DELETE ON Series
+FOR EACH ROW
+  WHEN (SELECT COUNT(*) FROM Series WHERE parentStudy = old.parentStudy) = 0
+  BEGIN
+    SELECT DeleteFromFileStorage("deleting parent study"); -- TODO REMOVE THIS
+    DELETE FROM Studies WHERE uuid = old.parentStudy;
+  END;
+
+CREATE TRIGGER StudyRemovedUpwardCleaning
+AFTER DELETE ON Studies
+FOR EACH ROW
+  WHEN (SELECT COUNT(*) FROM Studies WHERE parentPatient = old.parentPatient) = 0
+  BEGIN
+    SELECT DeleteFromFileStorage("deleting parent patient"); -- TODO REMOVE THIS
+    DELETE FROM Patients WHERE uuid = old.parentPatient;
+  END;
--- a/OrthancServer/PrepareDatabase.sql	Tue Nov 27 17:36:19 2012 +0100
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,136 +0,0 @@
-CREATE TABLE GlobalProperties(
-       name TEXT PRIMARY KEY,
-       value TEXT
-       );
-
-CREATE TABLE Resources(
-       uuid TEXT PRIMARY KEY,
-       resourceType INTEGER
-       );
-
-CREATE TABLE Patients(
-       uuid TEXT PRIMARY KEY,
-       dicomPatientId TEXT
-       );
-
-CREATE TABLE Studies(
-       uuid TEXT PRIMARY KEY,
-       parentPatient TEXT REFERENCES Patients(uuid) ON DELETE CASCADE,
-       dicomStudy TEXT
-       );
-
-CREATE TABLE Series(
-       uuid TEXT PRIMARY KEY,
-       parentStudy TEXT REFERENCES Studies(uuid) ON DELETE CASCADE,
-       dicomSeries TEXT,
-       expectedNumberOfInstances INTEGER
-       );
-
-CREATE TABLE Instances(
-       uuid TEXT PRIMARY KEY,
-       parentSeries TEXT REFERENCES Series(uuid) ON DELETE CASCADE,
-       dicomInstance TEXT,
-       fileUuid TEXT,
-       fileSize INTEGER,
-       jsonUuid TEXT,
-       distantAet TEXT,
-       indexInSeries INTEGER
-       );
-
-CREATE TABLE MainDicomTags(
-       uuid TEXT,
-       tagGroup INTEGER,
-       tagElement INTEGER,
-       value TEXT,
-       PRIMARY KEY(uuid, tagGroup, tagElement)
-       );
-
-CREATE TABLE Changes(
-       seq INTEGER PRIMARY KEY AUTOINCREMENT,
-       basePath TEXT,
-       uuid TEXT
-       );
-
-
-CREATE INDEX PatientToStudies ON Studies(parentPatient);
-CREATE INDEX StudyToSeries ON Series(parentStudy);
-CREATE INDEX SeriesToInstances ON Instances(parentSeries);
-
-CREATE INDEX DicomPatientIndex ON Patients(dicomPatientId);
-CREATE INDEX DicomStudyIndex ON Studies(dicomStudy);
-CREATE INDEX DicomSeriesIndex ON Series(dicomSeries);
-CREATE INDEX DicomInstanceIndex ON Instances(dicomInstance);
-
-CREATE INDEX MainDicomTagsIndex ON MainDicomTags(uuid);
-CREATE INDEX MainDicomTagsGroupElement ON MainDicomTags(tagGroup, tagElement);
-CREATE INDEX MainDicomTagsValues ON MainDicomTags(value COLLATE BINARY);
-
-CREATE INDEX ChangesIndex ON Changes(uuid);
-
-CREATE TRIGGER InstanceRemoved
-AFTER DELETE ON Instances
-FOR EACH ROW BEGIN
-  DELETE FROM Resources WHERE uuid = old.uuid;
-  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
-  DELETE FROM Changes WHERE uuid = old.uuid;
-  SELECT DeleteFromFileStorage(old.fileUuid);
-  SELECT DeleteFromFileStorage(old.jsonUuid);
-  SELECT SignalDeletedLevel(3, old.parentSeries);
-END;
-
-CREATE TRIGGER SeriesRemoved
-AFTER DELETE ON Series
-FOR EACH ROW BEGIN
-  DELETE FROM Resources WHERE uuid = old.uuid;
-  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
-  DELETE FROM Changes WHERE uuid = old.uuid;
-  SELECT SignalDeletedLevel(2, old.parentStudy);
-END;
-
-CREATE TRIGGER StudyRemoved
-AFTER DELETE ON Studies
-FOR EACH ROW BEGIN
-  DELETE FROM Resources WHERE uuid = old.uuid;
-  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
-  DELETE FROM Changes WHERE uuid = old.uuid;
-  SELECT SignalDeletedLevel(1, old.parentPatient);
-END;
-
-CREATE TRIGGER PatientRemoved
-AFTER DELETE ON Patients
-FOR EACH ROW BEGIN
-  DELETE FROM Resources WHERE uuid = old.uuid;
-  DELETE FROM MainDicomTags WHERE uuid = old.uuid;
-  DELETE FROM Changes WHERE uuid = old.uuid;
-  SELECT SignalDeletedLevel(0, "");
-END;
-
-
-
-
-CREATE TRIGGER InstanceRemovedUpwardCleaning
-AFTER DELETE ON Instances
-FOR EACH ROW
-  WHEN (SELECT COUNT(*) FROM Instances WHERE parentSeries = old.parentSeries) = 0
-  BEGIN
-    SELECT DeleteFromFileStorage("deleting parent series"); -- TODO REMOVE THIS
-    DELETE FROM Series WHERE uuid = old.parentSeries;
-  END;
-
-CREATE TRIGGER SeriesRemovedUpwardCleaning
-AFTER DELETE ON Series
-FOR EACH ROW
-  WHEN (SELECT COUNT(*) FROM Series WHERE parentStudy = old.parentStudy) = 0
-  BEGIN
-    SELECT DeleteFromFileStorage("deleting parent study"); -- TODO REMOVE THIS
-    DELETE FROM Studies WHERE uuid = old.parentStudy;
-  END;
-
-CREATE TRIGGER StudyRemovedUpwardCleaning
-AFTER DELETE ON Studies
-FOR EACH ROW
-  WHEN (SELECT COUNT(*) FROM Studies WHERE parentPatient = old.parentPatient) = 0
-  BEGIN
-    SELECT DeleteFromFileStorage("deleting parent patient"); -- TODO REMOVE THIS
-    DELETE FROM Patients WHERE uuid = old.parentPatient;
-  END;
--- a/OrthancServer/ServerIndex.cpp Tue Nov 27 17:36:19 2012 +0100 +++ b/OrthancServer/ServerIndex.cpp Tue Nov 27 17:48:37 2012 +0100 @@ -53,7 +53,7 @@ { namespace Internals { - class ServerIndexListenerTodo : public IServerIndexListener + class ServerIndexListener : public IServerIndexListener { private: FileStorage& fileStorage_; @@ -62,7 +62,7 @@ std::string remainingPublicId_; public: - ServerIndexListenerTodo(FileStorage& fileStorage) : + ServerIndexListener(FileStorage& fileStorage) : fileStorage_(fileStorage), hasRemainingLevel_(false) { @@ -221,259 +221,35 @@ } - void ServerIndex::StoreMainDicomTags(const std::string& uuid, - const DicomMap& map) - { - DicomArray flattened(map); - for (size_t i = 0; i < flattened.GetSize(); i++) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "INSERT INTO MainDicomTags VALUES(?, ?, ?, ?)"); - s.BindString(0, uuid); - s.BindInt(1, flattened.GetElement(i).GetTag().GetGroup()); - s.BindInt(2, flattened.GetElement(i).GetTag().GetElement()); - s.BindString(3, flattened.GetElement(i).GetValue().AsString()); - s.Run(); - } - } - - bool ServerIndex::HasInstance(DicomInstanceHasher& hasher) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "SELECT uuid FROM Instances WHERE dicomInstance=?"); - s.BindString(0, hasher.GetInstanceUid()); - return s.Step(); - } - - - void ServerIndex::RecordChange(const std::string& resourceType, - const std::string& uuid) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "INSERT INTO Changes VALUES(NULL, ?, ?)"); - s.BindString(0, resourceType); - s.BindString(1, uuid); - s.Run(); - } - - - void ServerIndex::CreateInstance(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary, - const std::string& fileUuid, - uint64_t fileSize, - const std::string& jsonUuid, - const std::string& remoteAet) - { - SQLite::Statement s2(db_, SQLITE_FROM_HERE, "INSERT INTO Resources VALUES(?, ?)"); - s2.BindString(0, hasher.HashInstance()); - s2.BindInt(1, ResourceType_Instance); - s2.Run(); - - SQLite::Statement s(db_, SQLITE_FROM_HERE, "INSERT INTO Instances VALUES(?, ?, ?, ?, ?, ?, ?, ?)"); - s.BindString(0, hasher.HashInstance()); - s.BindString(1, hasher.HashSeries()); - s.BindString(2, hasher.GetInstanceUid()); - s.BindString(3, fileUuid); - s.BindInt64(4, fileSize); - s.BindString(5, jsonUuid); - s.BindString(6, remoteAet); - - const DicomValue* indexInSeries; - if ((indexInSeries = dicomSummary.TestAndGetValue(DICOM_TAG_INSTANCE_NUMBER)) != NULL || - (indexInSeries = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGE_INDEX)) != NULL) - { - s.BindInt(7, boost::lexical_cast<unsigned int>(indexInSeries->AsString())); - } - else - { - s.BindNull(7); - } - - s.Run(); - - RecordChange("instances", hasher.HashInstance()); - - DicomMap dicom; - dicomSummary.ExtractInstanceInformation(dicom); - StoreMainDicomTags(hasher.HashInstance(), dicom); - } - - void ServerIndex::RemoveInstance(const std::string& uuid) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "DELETE FROM Instances WHERE uuid=?"); - s.BindString(0, uuid); - s.Run(); - } - - bool ServerIndex::HasSeries(DicomInstanceHasher& hasher) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "SELECT uuid FROM Series WHERE dicomSeries=?"); - s.BindString(0, hasher.GetSeriesUid()); - return s.Step(); - } - - void ServerIndex::CreateSeries(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary) - { - SQLite::Statement s2(db_, SQLITE_FROM_HERE, "INSERT INTO Resources VALUES(?, ?)"); - s2.BindString(0, hasher.HashSeries()); - s2.BindInt(1, ResourceType_Series); - s2.Run(); - - SQLite::Statement 
s(db_, SQLITE_FROM_HERE, "INSERT INTO Series VALUES(?, ?, ?, ?)"); - s.BindString(0, hasher.HashSeries()); - s.BindString(1, hasher.HashStudy()); - s.BindString(2, hasher.GetSeriesUid()); - - const DicomValue* expectedNumberOfInstances; - if (//(expectedNumberOfInstances = dicomSummary.TestAndGetValue(DICOM_TAG_NUMBER_OF_FRAMES)) != NULL || - (expectedNumberOfInstances = dicomSummary.TestAndGetValue(DICOM_TAG_NUMBER_OF_SLICES)) != NULL || - //(expectedNumberOfInstances = dicomSummary.TestAndGetValue(DICOM_TAG_CARDIAC_NUMBER_OF_IMAGES)) != NULL || - (expectedNumberOfInstances = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGES_IN_ACQUISITION)) != NULL) - { - s.BindInt(3, boost::lexical_cast<unsigned int>(expectedNumberOfInstances->AsString())); - } - else - { - s.BindNull(3); - } - - s.Run(); - - RecordChange("series", hasher.HashSeries()); - - DicomMap dicom; - dicomSummary.ExtractSeriesInformation(dicom); - StoreMainDicomTags(hasher.HashSeries(), dicom); - } - - bool ServerIndex::HasStudy(DicomInstanceHasher& hasher) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "SELECT uuid FROM Studies WHERE dicomStudy=?"); - s.BindString(0, hasher.GetStudyUid()); - return s.Step(); - } - - void ServerIndex::CreateStudy(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary) - { - SQLite::Statement s2(db_, SQLITE_FROM_HERE, "INSERT INTO Resources VALUES(?, ?)"); - s2.BindString(0, hasher.HashStudy()); - s2.BindInt(1, ResourceType_Study); - s2.Run(); - - SQLite::Statement s(db_, SQLITE_FROM_HERE, "INSERT INTO Studies VALUES(?, ?, ?)"); - s.BindString(0, hasher.HashStudy()); - s.BindString(1, hasher.HashPatient()); - s.BindString(2, hasher.GetStudyUid()); - s.Run(); - - RecordChange("studies", hasher.HashStudy()); - - DicomMap dicom; - dicomSummary.ExtractStudyInformation(dicom); - StoreMainDicomTags(hasher.HashStudy(), dicom); - } - - - - bool ServerIndex::HasPatient(DicomInstanceHasher& hasher) - { - SQLite::Statement s(db_, SQLITE_FROM_HERE, "SELECT uuid FROM Patients WHERE dicomPatientId=?"); - s.BindString(0,hasher.GetPatientId()); - return s.Step(); - } - - void ServerIndex::CreatePatient(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary) - { - std::string dicomPatientId = dicomSummary.GetValue(DICOM_TAG_PATIENT_ID).AsString(); - - SQLite::Statement s2(db_, SQLITE_FROM_HERE, "INSERT INTO Resources VALUES(?, ?)"); - s2.BindString(0, hasher.HashPatient()); - s2.BindInt(1, ResourceType_Patient); - s2.Run(); - - SQLite::Statement s(db_, SQLITE_FROM_HERE, "INSERT INTO Patients VALUES(?, ?)"); - s.BindString(0, hasher.HashPatient()); - s.BindString(1, dicomPatientId); - s.Run(); - - RecordChange("patients", hasher.HashPatient()); - - DicomMap dicom; - dicomSummary.ExtractPatientInformation(dicom); - StoreMainDicomTags(hasher.HashPatient(), dicom); - } - - bool ServerIndex::DeleteInternal(Json::Value& target, const std::string& uuid, - const std::string& tableName) + ResourceType expectedType) { boost::mutex::scoped_lock scoped_lock(mutex_); - - { - listener2_->Reset(); - - std::auto_ptr<SQLite::Transaction> t(db2_->StartTransaction()); - t->Begin(); + listener_->Reset(); - int64_t id; - ResourceType type; - if (!db2_->LookupResource(uuid, id, type)) - { - return false; - } - - db2_->DeleteResource(id); - - if (listener2_->HasRemainingLevel()) - { - ResourceType type = listener2_->GetRemainingType(); - const std::string& uuid = listener2_->GetRemainingPublicId(); + std::auto_ptr<SQLite::Transaction> t(db_->StartTransaction()); + t->Begin(); - target["RemainingAncestor"] = 
Json::Value(Json::objectValue); - target["RemainingAncestor"]["Path"] = std::string(GetBasePath(type)) + "/" + uuid; - target["RemainingAncestor"]["Type"] = ToString(type); - target["RemainingAncestor"]["ID"] = uuid; - } - else - { - target["RemainingAncestor"] = Json::nullValue; - } - - std::cout << target << std::endl; - - t->Commit(); - - return true; - } - - - - deletedLevels_->Clear(); - - SQLite::Statement s(db_, "DELETE FROM " + tableName + " WHERE uuid=?"); - s.BindString(0, uuid); - - if (!s.Run()) + int64_t id; + ResourceType type; + if (!db_->LookupResource(uuid, id, type) || + expectedType != type) { return false; } + + db_->DeleteResource(id); - if (db_.GetLastChangeCount() == 0) + if (listener_->HasRemainingLevel()) { - // Nothing was deleted, inexistent UUID - return false; - } - - if (deletedLevels_->HasRemainingLevel()) - { - std::string type(deletedLevels_->GetRemainingLevelType()); - const std::string& uuid = deletedLevels_->GetRemainingLevelUuid(); + ResourceType type = listener_->GetRemainingType(); + const std::string& uuid = listener_->GetRemainingPublicId(); target["RemainingAncestor"] = Json::Value(Json::objectValue); - target["RemainingAncestor"]["Path"] = "/" + type + "/" + uuid; - target["RemainingAncestor"]["Type"] = type; + target["RemainingAncestor"]["Path"] = std::string(GetBasePath(type)) + "/" + uuid; + target["RemainingAncestor"]["Type"] = ToString(type); target["RemainingAncestor"]["ID"] = uuid; } else @@ -481,6 +257,8 @@ target["RemainingAncestor"] = Json::nullValue; } + t->Commit(); + return true; } @@ -488,12 +266,11 @@ ServerIndex::ServerIndex(FileStorage& fileStorage, const std::string& dbPath) { - listener2_.reset(new Internals::ServerIndexListenerTodo(fileStorage)); + listener_.reset(new Internals::ServerIndexListener(fileStorage)); if (dbPath == ":memory:") { - db_.OpenInMemory(); - db2_.reset(new DatabaseWrapper(*listener2_)); + db_.reset(new DatabaseWrapper(*listener_)); } else { @@ -507,199 +284,120 @@ { } - db2_.reset(new DatabaseWrapper(p.string() + "/index2", *listener2_)); - - p /= "index"; - db_.Open(p.string()); - } - - db_.Register(new Internals::DeleteFromFileStorageFunction(fileStorage)); - deletedLevels_ = (Internals::SignalDeletedLevelFunction*) - db_.Register(new Internals::SignalDeletedLevelFunction); - - if (!db_.DoesTableExist("GlobalProperties")) - { - LOG(INFO) << "Creating the database"; - std::string query; - EmbeddedResources::GetFileResource(query, EmbeddedResources::PREPARE_DATABASE); - db_.Execute(query); + db_.reset(new DatabaseWrapper(p.string() + "/index", *listener_)); } } - StoreStatus ServerIndex::Store2(const DicomMap& dicomSummary, - const std::string& fileUuid, - uint64_t uncompressedFileSize, - const std::string& jsonUuid, - const std::string& remoteAet) - { - boost::mutex::scoped_lock scoped_lock(mutex_); - - DicomInstanceHasher hasher(dicomSummary); - - try - { - std::auto_ptr<SQLite::Transaction> t(db2_->StartTransaction()); - t->Begin(); - - int64_t patient, study, series, instance; - ResourceType type; - bool isNewSeries = false; - - // Do nothing if the instance already exists - if (db2_->LookupResource(hasher.HashInstance(), patient, type)) - { - assert(type == ResourceType_Instance); - return StoreStatus_AlreadyStored; - } - - // Create the instance - instance = db2_->CreateResource(hasher.HashInstance(), ResourceType_Instance); - - DicomMap dicom; - dicomSummary.ExtractInstanceInformation(dicom); - db2_->SetMainDicomTags(instance, dicom); - - // Create the patient/study/series/instance hierarchy - if 
(!db2_->LookupResource(hasher.HashSeries(), series, type)) - { - // This is a new series - isNewSeries = true; - series = db2_->CreateResource(hasher.HashSeries(), ResourceType_Series); - dicomSummary.ExtractSeriesInformation(dicom); - db2_->SetMainDicomTags(series, dicom); - db2_->AttachChild(series, instance); - - if (!db2_->LookupResource(hasher.HashStudy(), study, type)) - { - // This is a new study - study = db2_->CreateResource(hasher.HashStudy(), ResourceType_Study); - dicomSummary.ExtractStudyInformation(dicom); - db2_->SetMainDicomTags(study, dicom); - db2_->AttachChild(study, series); - - if (!db2_->LookupResource(hasher.HashPatient(), patient, type)) - { - // This is a new patient - patient = db2_->CreateResource(hasher.HashPatient(), ResourceType_Patient); - dicomSummary.ExtractPatientInformation(dicom); - db2_->SetMainDicomTags(patient, dicom); - db2_->AttachChild(patient, study); - } - else - { - assert(type == ResourceType_Patient); - db2_->AttachChild(patient, study); - } - } - else - { - assert(type == ResourceType_Study); - db2_->AttachChild(study, series); - } - } - else - { - assert(type == ResourceType_Series); - db2_->AttachChild(series, instance); - } - - // Attach the files to the newly created instance - db2_->AttachFile(instance, AttachedFileType_Dicom, fileUuid, uncompressedFileSize); - db2_->AttachFile(instance, AttachedFileType_Json, jsonUuid, 0); // TODO "0" - - // Attach the metadata - db2_->SetMetadata(instance, MetadataType_Instance_ReceptionDate, Toolbox::GetNowIsoString()); - db2_->SetMetadata(instance, MetadataType_Instance_RemoteAet, remoteAet); - - const DicomValue* value; - if ((value = dicomSummary.TestAndGetValue(DICOM_TAG_INSTANCE_NUMBER)) != NULL || - (value = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGE_INDEX)) != NULL) - { - db2_->SetMetadata(instance, MetadataType_Instance_IndexInSeries, value->AsString()); - } - - if (isNewSeries) - { - if ((value = dicomSummary.TestAndGetValue(DICOM_TAG_NUMBER_OF_SLICES)) != NULL || - (value = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGES_IN_ACQUISITION)) != NULL || - (value = dicomSummary.TestAndGetValue(DICOM_TAG_CARDIAC_NUMBER_OF_IMAGES)) != NULL) - { - db2_->SetMetadata(series, MetadataType_Series_ExpectedNumberOfInstances, value->AsString()); - } - } - - t->Commit(); - } - catch (OrthancException& e) - { - LOG(ERROR) << "EXCEPTION2 [" << e.What() << "]" << " " << db_.GetErrorMessage(); - } - - return StoreStatus_Failure; - } - - StoreStatus ServerIndex::Store(const DicomMap& dicomSummary, const std::string& fileUuid, uint64_t uncompressedFileSize, const std::string& jsonUuid, const std::string& remoteAet) { - Store2(dicomSummary, fileUuid, uncompressedFileSize, jsonUuid, remoteAet); - boost::mutex::scoped_lock scoped_lock(mutex_); DicomInstanceHasher hasher(dicomSummary); try { - SQLite::Transaction t(db_); - t.Begin(); + std::auto_ptr<SQLite::Transaction> t(db_->StartTransaction()); + t->Begin(); - if (HasInstance(hasher)) + int64_t patient, study, series, instance; + ResourceType type; + bool isNewSeries = false; + + // Do nothing if the instance already exists + if (db_->LookupResource(hasher.HashInstance(), patient, type)) { + assert(type == ResourceType_Instance); return StoreStatus_AlreadyStored; - // TODO: Check consistency? 
} - if (HasPatient(hasher)) + // Create the instance + instance = db_->CreateResource(hasher.HashInstance(), ResourceType_Instance); + + DicomMap dicom; + dicomSummary.ExtractInstanceInformation(dicom); + db_->SetMainDicomTags(instance, dicom); + + // Create the patient/study/series/instance hierarchy + if (!db_->LookupResource(hasher.HashSeries(), series, type)) { - // TODO: Check consistency? + // This is a new series + isNewSeries = true; + series = db_->CreateResource(hasher.HashSeries(), ResourceType_Series); + dicomSummary.ExtractSeriesInformation(dicom); + db_->SetMainDicomTags(series, dicom); + db_->AttachChild(series, instance); + + if (!db_->LookupResource(hasher.HashStudy(), study, type)) + { + // This is a new study + study = db_->CreateResource(hasher.HashStudy(), ResourceType_Study); + dicomSummary.ExtractStudyInformation(dicom); + db_->SetMainDicomTags(study, dicom); + db_->AttachChild(study, series); + + if (!db_->LookupResource(hasher.HashPatient(), patient, type)) + { + // This is a new patient + patient = db_->CreateResource(hasher.HashPatient(), ResourceType_Patient); + dicomSummary.ExtractPatientInformation(dicom); + db_->SetMainDicomTags(patient, dicom); + db_->AttachChild(patient, study); + } + else + { + assert(type == ResourceType_Patient); + db_->AttachChild(patient, study); + } + } + else + { + assert(type == ResourceType_Study); + db_->AttachChild(study, series); + } } else { - CreatePatient(hasher, dicomSummary); - } - - if (HasStudy(hasher)) - { - // TODO: Check consistency? - } - else - { - CreateStudy(hasher, dicomSummary); + assert(type == ResourceType_Series); + db_->AttachChild(series, instance); } - if (HasSeries(hasher)) + // Attach the files to the newly created instance + db_->AttachFile(instance, AttachedFileType_Dicom, fileUuid, uncompressedFileSize); + db_->AttachFile(instance, AttachedFileType_Json, jsonUuid, 0); // TODO "0" + + // Attach the metadata + db_->SetMetadata(instance, MetadataType_Instance_ReceptionDate, Toolbox::GetNowIsoString()); + db_->SetMetadata(instance, MetadataType_Instance_RemoteAet, remoteAet); + + const DicomValue* value; + if ((value = dicomSummary.TestAndGetValue(DICOM_TAG_INSTANCE_NUMBER)) != NULL || + (value = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGE_INDEX)) != NULL) { - // TODO: Check consistency? 
- } - else - { - CreateSeries(hasher, dicomSummary); + db_->SetMetadata(instance, MetadataType_Instance_IndexInSeries, value->AsString()); } - CreateInstance(hasher, dicomSummary, fileUuid, - uncompressedFileSize, jsonUuid, remoteAet); - - t.Commit(); + if (isNewSeries) + { + if ((value = dicomSummary.TestAndGetValue(DICOM_TAG_NUMBER_OF_SLICES)) != NULL || + (value = dicomSummary.TestAndGetValue(DICOM_TAG_IMAGES_IN_ACQUISITION)) != NULL || + (value = dicomSummary.TestAndGetValue(DICOM_TAG_CARDIAC_NUMBER_OF_IMAGES)) != NULL) + { + db_->SetMetadata(series, MetadataType_Series_ExpectedNumberOfInstances, value->AsString()); + } + } + + t->Commit(); + return StoreStatus_Success; - //t.Rollback(); } catch (OrthancException& e) { - LOG(ERROR) << "EXCEPTION [" << e.What() << "]" << " " << db_.GetErrorMessage(); + LOG(ERROR) << "EXCEPTION2 [" << e.What() << "]" << " " << db_->GetErrorMessage(); } return StoreStatus_Failure; @@ -744,20 +442,20 @@ uint64_t ServerIndex::GetTotalCompressedSize() { boost::mutex::scoped_lock scoped_lock(mutex_); - return db2_->GetTotalCompressedSize(); + return db_->GetTotalCompressedSize(); } uint64_t ServerIndex::GetTotalUncompressedSize() { boost::mutex::scoped_lock scoped_lock(mutex_); - return db2_->GetTotalUncompressedSize(); + return db_->GetTotalUncompressedSize(); } SeriesStatus ServerIndex::GetSeriesStatus(int id) { // Get the expected number of instances in this series (from the metadata) - std::string s = db2_->GetMetadata(id, MetadataType_Series_ExpectedNumberOfInstances); + std::string s = db_->GetMetadata(id, MetadataType_Series_ExpectedNumberOfInstances); size_t expected; try @@ -775,14 +473,14 @@ // Loop over the instances of this series std::list<int64_t> children; - db2_->GetChildrenInternalId(children, id); + db_->GetChildrenInternalId(children, id); std::set<size_t> instances; for (std::list<int64_t>::const_iterator it = children.begin(); it != children.end(); it++) { // Get the index of this instance in the series - s = db2_->GetMetadata(*it, MetadataType_Instance_IndexInSeries); + s = db_->GetMetadata(*it, MetadataType_Instance_IndexInSeries); size_t index; try { @@ -820,11 +518,11 @@ - void ServerIndex::MainDicomTagsToJson2(Json::Value& target, - int64_t resourceId) + void ServerIndex::MainDicomTagsToJson(Json::Value& target, + int64_t resourceId) { DicomMap tags; - db2_->GetMainDicomTags(tags, resourceId); + db_->GetMainDicomTags(tags, resourceId); target["MainDicomTags"] = Json::objectValue; FromDcmtkBridge::ToJson(target["MainDicomTags"], tags); } @@ -840,7 +538,7 @@ // Lookup for the requested resource int64_t id; ResourceType type; - if (!db2_->LookupResource(publicId, id, type) || + if (!db_->LookupResource(publicId, id, type) || type != expectedType) { return false; @@ -850,12 +548,12 @@ if (type != ResourceType_Patient) { int64_t parentId; - if (!db2_->LookupParent(parentId, id)) + if (!db_->LookupParent(parentId, id)) { throw OrthancException(ErrorCode_InternalError); } - std::string parent = db2_->GetPublicId(parentId); + std::string parent = db_->GetPublicId(parentId); switch (type) { @@ -878,7 +576,7 @@ // List the children resources std::list<std::string> children; - db2_->GetChildrenPublicId(children, id); + db_->GetChildrenPublicId(children, id); if (type != ResourceType_Instance) { @@ -926,9 +624,9 @@ result["Status"] = ToString(GetSeriesStatus(id)); int i; - if (db2_->GetMetadataAsInteger(i, id, MetadataType_Series_ExpectedNumberOfInstances)) + if (db_->GetMetadataAsInteger(i, id, MetadataType_Series_ExpectedNumberOfInstances)) 
result["ExpectedNumberOfInstances"] = i; - else + else result["ExpectedNumberOfInstances"] = Json::nullValue; break; @@ -940,7 +638,7 @@ std::string fileUuid; uint64_t uncompressedSize; - if (!db2_->LookupFile(id, AttachedFileType_Dicom, fileUuid, uncompressedSize)) + if (!db_->LookupFile(id, AttachedFileType_Dicom, fileUuid, uncompressedSize)) { throw OrthancException(ErrorCode_InternalError); } @@ -949,7 +647,7 @@ result["FileUuid"] = fileUuid; int i; - if (db2_->GetMetadataAsInteger(i, id, MetadataType_Instance_IndexInSeries)) + if (db_->GetMetadataAsInteger(i, id, MetadataType_Instance_IndexInSeries)) result["IndexInSeries"] = i; else result["IndexInSeries"] = Json::nullValue; @@ -963,7 +661,7 @@ // Record the remaining information result["ID"] = publicId; - MainDicomTagsToJson2(result, id); + MainDicomTagsToJson(result, id); return true; } @@ -978,7 +676,7 @@ int64_t id; ResourceType type; - if (!db2_->LookupResource(instanceUuid, id, type) || + if (!db_->LookupResource(instanceUuid, id, type) || type != ResourceType_Instance) { throw OrthancException(ErrorCode_InternalError); @@ -986,7 +684,7 @@ uint64_t compressedSize, uncompressedSize; - return db2_->LookupFile(id, contentType, fileUuid, compressedSize, uncompressedSize, compressionType); + return db_->LookupFile(id, contentType, fileUuid, compressedSize, uncompressedSize, compressionType); } @@ -995,7 +693,7 @@ ResourceType resourceType) { boost::mutex::scoped_lock scoped_lock(mutex_); - db2_->GetAllPublicIds(target, resourceType); + db_->GetAllPublicIds(target, resourceType); } @@ -1004,63 +702,66 @@ const std::string& filter, unsigned int maxResults) { - assert(target.type() == Json::objectValue); boost::mutex::scoped_lock scoped_lock(mutex_); + return false; + // TODO !!!! + + /*assert(target.type() == Json::objectValue); + boost::mutex::scoped_lock scoped_lock(mutex_); - if (filter.size() != 0 && - filter != "instances" && - filter != "series" && - filter != "studies" && - filter != "patients") - { + if (filter.size() != 0 && + filter != "instances" && + filter != "series" && + filter != "studies" && + filter != "patients") + { return false; - } + } - std::auto_ptr<SQLite::Statement> s; - if (filter.size() == 0) - { + std::auto_ptr<SQLite::Statement> s; + if (filter.size() == 0) + { s.reset(new SQLite::Statement(db_, SQLITE_FROM_HERE, "SELECT * FROM Changes WHERE seq>? " - "ORDER BY seq LIMIT ?")); + "ORDER BY seq LIMIT ?")); s->BindInt64(0, since); s->BindInt(1, maxResults); - } - else - { + } + else + { s.reset(new SQLite::Statement(db_, SQLITE_FROM_HERE, "SELECT * FROM Changes WHERE seq>? " - "AND basePath=? ORDER BY seq LIMIT ?")); + "AND basePath=? 
ORDER BY seq LIMIT ?")); s->BindInt64(0, since); s->BindString(1, filter); s->BindInt(2, maxResults); - } + } - int64_t lastSeq = 0; - Json::Value results(Json::arrayValue); - while (s->Step()) - { + int64_t lastSeq = 0; + Json::Value results(Json::arrayValue); + while (s->Step()) + { int64_t seq = s->ColumnInt64(0); std::string basePath = s->ColumnString(1); std::string uuid = s->ColumnString(2); if (filter.size() == 0 || - filter == basePath) + filter == basePath) { - Json::Value change(Json::objectValue); - change["Seq"] = static_cast<int>(seq); // TODO JsonCpp in 64bit - change["BasePath"] = basePath; - change["ID"] = uuid; - results.append(change); + Json::Value change(Json::objectValue); + change["Seq"] = static_cast<int>(seq); // TODO JsonCpp in 64bit + change["BasePath"] = basePath; + change["ID"] = uuid; + results.append(change); } if (seq > lastSeq) { - lastSeq = seq; + lastSeq = seq; } - } + } - target["Results"] = results; - target["LastSeq"] = static_cast<int>(lastSeq); // TODO JsonCpp in 64bit + target["Results"] = results; + target["LastSeq"] = static_cast<int>(lastSeq); // TODO JsonCpp in 64bit - return true; + return true;*/ } - }
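The net effect of the ServerIndex.cpp rewrite: the class no longer issues SQL itself. The experimental Store2 becomes the one and only Store, every public method takes the mutex and works through the single DatabaseWrapper inside an explicit transaction, DeleteInternal dispatches on ResourceType, and Internals::ServerIndexListener reports the deepest ancestor left alive after a recursive delete; GetChanges() is temporarily stubbed out (return false; // TODO) until the changes log is ported to DatabaseWrapper. A hedged caller-side sketch based only on the signatures visible in this changeset (storage, summary and the UUID variables are placeholders assumed to exist, not Orthanc API):

    // Sketch of a caller, under the assumptions stated above.
    ServerIndex index(storage, ":memory:");  // ":memory:" now builds an in-memory DatabaseWrapper

    StoreStatus status = index.Store(summary, fileUuid, fileSize, jsonUuid, "REMOTE_AET");

    Json::Value result;
    if (status == StoreStatus_Success &&
        index.DeleteInstance(result, instancePublicId))
    {
      // result["RemainingAncestor"] is either Json::nullValue or an object with
      // "Path", "Type" and "ID" describing the deepest ancestor that was kept.
    }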
--- a/OrthancServer/ServerIndex.h Tue Nov 27 17:36:19 2012 +0100 +++ b/OrthancServer/ServerIndex.h Tue Nov 27 17:48:37 2012 +0100 @@ -46,67 +46,24 @@ { namespace Internals { - class SignalDeletedLevelFunction; - class ServerIndexListenerTodo; + class ServerIndexListener; } class ServerIndex { private: - SQLite::Connection db_; boost::mutex mutex_; - std::auto_ptr<Internals::ServerIndexListenerTodo> listener2_; - std::auto_ptr<DatabaseWrapper> db2_; - - // DO NOT delete the following one, SQLite::Connection will do it automatically - Internals::SignalDeletedLevelFunction* deletedLevels_; - - void StoreMainDicomTags(const std::string& uuid, - const DicomMap& map); - - bool HasPatient(DicomInstanceHasher& hasher); - - void CreatePatient(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary); - - bool HasStudy(DicomInstanceHasher& hasher); - - void CreateStudy(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary); + std::auto_ptr<Internals::ServerIndexListener> listener_; + std::auto_ptr<DatabaseWrapper> db_; - bool HasSeries(DicomInstanceHasher& hasher); - - void CreateSeries(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary); - - bool HasInstance(DicomInstanceHasher& hasher); - - void CreateInstance(DicomInstanceHasher& hasher, - const DicomMap& dicomSummary, - const std::string& fileUuid, - uint64_t fileSize, - const std::string& jsonUuid, - const std::string& remoteAet); - - void RecordChange(const std::string& resourceType, - const std::string& uuid); - - void RemoveInstance(const std::string& uuid); - - void MainDicomTagsToJson2(Json::Value& result, - int64_t resourceId); + void MainDicomTagsToJson(Json::Value& result, + int64_t resourceId); bool DeleteInternal(Json::Value& target, const std::string& uuid, - const std::string& tableName); - - StoreStatus Store2(const DicomMap& dicomSummary, - const std::string& fileUuid, - uint64_t uncompressedFileSize, - const std::string& jsonUuid, - const std::string& remoteAet); + ResourceType expectedType); bool LookupResource(Json::Value& result, const std::string& publicId, @@ -172,25 +129,25 @@ bool DeletePatient(Json::Value& target, const std::string& patientUuid) { - return DeleteInternal(target, patientUuid, "Patients"); + return DeleteInternal(target, patientUuid, ResourceType_Patient); } bool DeleteStudy(Json::Value& target, const std::string& studyUuid) { - return DeleteInternal(target, studyUuid, "Studies"); + return DeleteInternal(target, studyUuid, ResourceType_Study); } bool DeleteSeries(Json::Value& target, const std::string& seriesUuid) { - return DeleteInternal(target, seriesUuid, "Series"); + return DeleteInternal(target, seriesUuid, ResourceType_Series); } bool DeleteInstance(Json::Value& target, const std::string& instanceUuid) { - return DeleteInternal(target, instanceUuid, "Instances"); + return DeleteInternal(target, instanceUuid, ResourceType_Instance); } bool GetChanges(Json::Value& target,
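The header now reduces the private state to three members (the mutex, the listener and the DatabaseWrapper), and the per-table helpers disappear in favour of DeleteInternal(target, uuid, ResourceType). One detail worth noting, shown as a sketch taken from the new constructor in this changeset: the listener must be created before the DatabaseWrapper, which keeps a reference to it for the deletion callbacks, and because listener_ is declared first it is destroyed after db_, so the wrapper never outlives its listener.

    // Construction-order sketch from the new ServerIndex constructor; the
    // commented line is the on-disk variant used when dbPath is a directory.
    listener_.reset(new Internals::ServerIndexListener(fileStorage));
    db_.reset(new DatabaseWrapper(*listener_));                            // ":memory:"
    // db_.reset(new DatabaseWrapper(p.string() + "/index", *listener_));  // on disk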