orthanc-tests: changeset 713:afa9cdd13880 (branch: find-refactoring)
merge default -> find-refactoring

author:    Alain Mazy <am@orthanc.team>
date:      Fri, 04 Oct 2024 19:00:38 +0200
parents:   6bd2b7f40917 (diff), 0322fda1834e (current diff)
children:  812ad50c5564
files:     Tests/Tests.py
diffstat:  4 files changed, 452 insertions(+), 2 deletions(-)
--- a/NewTests/README	Fri Oct 04 18:45:13 2024 +0200
+++ b/NewTests/README	Fri Oct 04 19:00:38 2024 +0200
@@ -192,3 +192,22 @@
 python3 NewTests/main.py --pattern=PostgresUpgrades.test_pg_upgrades.TestPgUpgrades.* \
                          --orthanc_under_tests_docker_image=orthancteam/orthanc:current
+
+
+Read Only PG:
+--------------
+
+Run the Read Only tests with your locally built version and break before execution to allow you to attach your debugger.
+
+python3 NewTests/main.py --pattern=ReadOnly.test_readonly_pg.TestReadOnlyPG.* \
+                         --orthanc_under_tests_exe=/home/alain/o/build/orthanc/Orthanc \
+                         --orthanc_under_tests_http_port=8043 \
+                         --plugin=/home/alain/o/build/orthanc-dicomweb/libOrthancDicomWeb.so \
+                         --plugin=/home/alain/o/build/pg/libOrthancPostgreSQLIndex.so \
+                         --break_after_preparation
+
+with Docker (TODO):
+
+python3 NewTests/main.py --pattern=ReadOnly.test_readonly_pg.TestReadOnlyPG.* \
+                         --orthanc_under_tests_docker_image=orthancteam/orthanc:current \
+                         --orthanc_under_tests_http_port=8043
\ No newline at end of file
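Note: when --break_after_preparation pauses the run (the preparation step prints the configuration path and waits for Enter), you can sanity-check that the Orthanc under test is effectively read-only before attaching the debugger. A minimal sketch, assuming the orthanc_api_client package already used by these tests and the HTTP port 8043 from the command above; the DICOM file path is purely illustrative:

    from orthanc_api_client import OrthancApiClient

    o = OrthancApiClient("http://localhost:8043")
    o.wait_started()
    try:
        # any write should be rejected once "ReadOnly" is true
        o.upload_file("Database/Knix/Loc/IM-0001-0001.dcm")
        print("unexpected: upload succeeded on a read-only server")
    except Exception as e:
        print(f"write rejected as expected: {e}")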
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/NewTests/ReadOnly/test_readonly_pg.py	Fri Oct 04 19:00:38 2024 +0200
@@ -0,0 +1,149 @@
+import unittest
+import time
+import os
+import threading
+from helpers import OrthancTestCase, Helpers
+
+from orthanc_api_client import OrthancApiClient, ChangeType
+from orthanc_api_client.exceptions import HttpError
+from orthanc_api_client import helpers as OrthancHelpers
+
+from orthanc_tools import OrthancTestDbPopulator
+
+import pathlib
+import subprocess
+import glob
+here = pathlib.Path(__file__).parent.resolve()
+
+
+class TestReadOnlyPG(OrthancTestCase):
+
+    @classmethod
+    def terminate(cls):
+
+        if Helpers.is_docker():
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+        else:
+            cls.pg_service_process.terminate()
+
+
+    @classmethod
+    def prepare(cls):
+        test_name = "ReadOnlyPG"
+        cls._storage_name = "read-only-pg"  # actually not used since we are using PG storage
+        network_name = "read-only-pg"
+
+        print(f'-------------- preparing {test_name} tests')
+
+        pg_hostname = "localhost"
+        if Helpers.is_docker():
+            pg_hostname = "pg-server"
+            cls.create_docker_network(network_name)
+
+        config = {
+            "PostgreSQL": {
+                "EnableStorage": True,
+                "EnableIndex": True,
+                "Host": pg_hostname,
+                "Port": 5432,
+                "Database": "postgres",
+                "Username": "postgres",
+                "Password": "postgres",
+                "IndexConnectionsCount": 10,
+                "MaximumConnectionRetries": 20,
+                "ConnectionRetryInterval": 1,
+                "TransactionMode": "ReadCommitted",
+                "EnableVerboseLogs": True
+            },
+            "AuthenticationEnabled": False,
+            "OverwriteInstances": True,
+            "ReadOnly": False,                 # disable for preparation
+            "DicomWeb": {
+                "EnableMetadataCache": False   # disable for preparation
+            }
+        }
+
+        # launch the docker PG server
+        print('--------------- launching PostgreSQL server ------------------')
+
+        pg_cmd = [
+            "docker", "run", "--rm",
+            "-p", "5432:5432",
+            "--name", "pg-server",
+            "--env", "POSTGRES_HOST_AUTH_METHOD=trust"
+        ]
+
+        if Helpers.is_docker():
+            pg_cmd.extend(["--network", network_name])
+        pg_cmd.append("postgres:15")
+
+        cls.pg_service_process = subprocess.Popen(pg_cmd)
+        time.sleep(5)
+
+        print('--------------- launching Orthanc to prepare DB ------------------')
+        cls.launch_orthanc_to_prepare_db(
+            config_name=f"{test_name}",
+            storage_name=cls._storage_name,
+            config=config,
+            plugins=Helpers.plugins,
+            docker_network=network_name
+        )
+
+        # upload a study
+        cls.uploaded_instances_ids = cls.o.upload_folder(here / "../../Database/Knix/Loc")
+        cls.one_instance_id = cls.uploaded_instances_ids[0]
+        cls.one_series_id = cls.o.instances.get_parent_series_id(cls.one_instance_id)
+        cls.one_study_id = cls.o.series.get_parent_study_id(cls.one_series_id)
+        cls.one_patient_id = cls.o.studies.get_parent_patient_id(cls.one_study_id)
+
+        cls.kill_orthanc()
+
+        print('--------------- stopped preparation Orthanc ------------------')
+
+        time.sleep(3)
+
+        # modify config for the readonly version
+        config["ReadOnly"] = True
+        config["DicomWeb"]["EnableMetadataCache"] = True
+
+        config_path = cls.generate_configuration(
+            config_name=f"{test_name}",
+            storage_name=cls._storage_name,
+            config=config,
+            plugins=Helpers.plugins
+        )
+
+        if Helpers.break_after_preparation:
+            print(f"++++ It is now time to start your Orthanc under tests with configuration file '{config_path}' +++++")
+            input("Press Enter to continue")
+        else:
+            cls.launch_orthanc_under_tests(
+                config_name=f"{test_name}",
+                storage_name=cls._storage_name,
+                config=config,
+                plugins=Helpers.plugins,
+                docker_network=network_name
+            )
+
+        cls.o = OrthancApiClient(cls.o._root_url)
+        cls.o.wait_started()
+
+
+    def test_write_methods_fail(self):
+        self.assertRaises(Exception, lambda: self.o.upload_folder(here / "../../Database/Knix/Loc"))
+        self.assertRaises(Exception, lambda: self.o.instances.delete(self.one_instance_id))
+        self.assertRaises(Exception, lambda: self.o.series.delete(self.one_series_id))
+        self.assertRaises(Exception, lambda: self.o.studies.delete(self.one_study_id))
+        self.assertRaises(Exception, lambda: self.o.patients.delete(self.one_patient_id))
+
+        tags = self.o.instances.get_tags(self.one_instance_id)
+
+
+    def test_read_methods_succeed(self):
+        # nothing should raise
+        tags = self.o.instances.get_tags(self.one_instance_id)
+
+        self.o.get_json(f"/dicom-web/studies/{tags['StudyInstanceUID']}/metadata")
+        self.o.get_json(f"/dicom-web/studies/{tags['StudyInstanceUID']}/series/{tags['SeriesInstanceUID']}/metadata")
+        self.o.get_json(f"/statistics")
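The write checks in test_write_methods_fail above enumerate one endpoint per resource level. A minimal, hypothetical refactoring sketch (not part of this changeset) showing how the same checks could be table-driven, reusing the orthanc_api_client method names from the test:

    def assert_writes_fail(o, ids):
        # each entry is (label, callable) so a failure can name the offending endpoint
        write_calls = [
            ("delete instance", lambda: o.instances.delete(ids["instance"])),
            ("delete series",   lambda: o.series.delete(ids["series"])),
            ("delete study",    lambda: o.studies.delete(ids["study"])),
            ("delete patient",  lambda: o.patients.delete(ids["patient"])),
        ]
        for label, call in write_calls:
            try:
                call()
            except Exception:
                continue  # rejected as expected on a read-only server
            raise AssertionError(f"'{label}' succeeded on a read-only Orthanc")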
--- a/NewTests/helpers.py	Fri Oct 04 18:45:13 2024 +0200
+++ b/NewTests/helpers.py	Fri Oct 04 19:00:38 2024 +0200
@@ -196,7 +196,7 @@
         subprocess.run(["docker", "network", "create", network])
 
     @classmethod
-    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = []):
+    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = [], docker_network: str = None):
         if config_name and storage_name and config:
             # generate the configuration file
             config_path = cls.generate_configuration(
@@ -219,7 +219,8 @@
                 docker_image=Helpers.orthanc_previous_version_docker_image,
                 storage_name=storage_name,
                 config_name=config_name,
-                config_path=config_path
+                config_path=config_path,
+                network=docker_network
             )
         else:
             raise RuntimeError("Invalid configuration, can not launch Orthanc")
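The new docker_network argument is simply threaded through to the Docker launcher; its effect amounts to adding --network to the docker run invocation, mirroring how pg_cmd is built in test_readonly_pg.py above. A hypothetical reduction (launch_container and its signature are illustrative, not the actual helper):

    import subprocess

    def launch_container(image: str, docker_network: str = None):
        cmd = ["docker", "run", "--rm"]
        if docker_network:
            # join the per-test network, e.g. "read-only-pg"
            cmd += ["--network", docker_network]
        cmd.append(image)
        return subprocess.Popen(cmd)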
--- a/Tests/Tests.py	Fri Oct 04 18:45:13 2024 +0200
+++ b/Tests/Tests.py	Fri Oct 04 19:00:38 2024 +0200
@@ -10784,3 +10784,284 @@
         self.assertEqual(1, int(a[0]['RequestedTags']['NumberOfStudyRelatedInstances']))
         self.assertEqual('CT', a[0]['RequestedTags']['ModalitiesInStudy'])
         self.assertEqual('', a[0]['RequestedTags']['PatientComments'])
+
+
+    def test_extended_find_order_by(self):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 5) and HasExtendedFind(_REMOTE):  # TODO: remove HasExtendedFind once find-refactoring branch has been merged
+
+            # Upload 12 instances
+            for i in range(3):
+                UploadInstance(_REMOTE, 'Brainix/Flair/IM-0001-000%d.dcm' % (i + 1))
+                UploadInstance(_REMOTE, 'Brainix/Epi/IM-0001-000%d.dcm' % (i + 1))
+                UploadInstance(_REMOTE, 'Knee/T1/IM-0001-000%d.dcm' % (i + 1))
+                UploadInstance(_REMOTE, 'Knee/T2/IM-0001-000%d.dcm' % (i + 1))
+
+            kneeT2SeriesId = 'bbf7a453-0d34251a-03663b55-46bb31b9-ffd74c59'
+            kneeT1SeriesId = '6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285'
+            brainixFlairSeriesId = '1e2c125c-411b8e86-3f4fe68e-a7584dd3-c6da78f0'
+            brainixEpiSeriesId = '2ac1316d-3e432022-62eabff2-c59f5475-9b1ac3f8'
+            DoPut(_REMOTE, '/series/%s/metadata/my-metadata' % kneeT2SeriesId, 'kneeT2')
+            DoPut(_REMOTE, '/series/%s/metadata/my-metadata' % kneeT1SeriesId, 'kneeT1')
+            DoPut(_REMOTE, '/series/%s/metadata/my-metadata' % brainixFlairSeriesId, 'brainixFlair')
+            DoPut(_REMOTE, '/series/%s/metadata/my-metadata' % brainixEpiSeriesId, 'brainixEpi')
+
+            # order by resource tag
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Study',
+                'Expand': True,
+                'Query' : { 'PatientName' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'PatientName', 'Direction': 'ASC' }
+                ]
+            })
+            self.assertEqual(2, len(a))
+            self.assertEqual("BRAINIX", a[0]['PatientMainDicomTags']['PatientName'])
+
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Study',
+                'Expand': True,
+                'Query' : { 'PatientName' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'PatientName', 'Direction': 'DESC' }
+                ]
+            })
+            self.assertEqual("BRAINIX", a[1]['PatientMainDicomTags']['PatientName'])
+
+            # order by parent tag
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Expand': False,
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'StudyDate', 'Direction': 'ASC' }
+                ]
+            })
+            # knee StudyDate    = 20080819
+            # brainix StudyDate = 20061201
+            self.assertEqual(4, len(a))
+            self.assertTrue(a[0] == brainixEpiSeriesId or a[0] == brainixFlairSeriesId)
+            self.assertTrue(a[3] == kneeT1SeriesId or a[3] == kneeT2SeriesId)
+
+            # order by parent tag and resource tag
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Expand': False,
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'StudyDate', 'Direction': 'ASC' },
+                    { 'Type': 'DicomTag', 'Key': 'SeriesTime', 'Direction': 'ASC' }
+                ]
+            })
+            # knee StudyDate    = 20080819
+            # brainix StudyDate = 20061201
+            self.assertEqual(4, len(a))
+            self.assertEqual(brainixFlairSeriesId, a[0])
+            self.assertEqual(brainixEpiSeriesId, a[1])
+            self.assertEqual(kneeT1SeriesId, a[2])
+            self.assertEqual(kneeT2SeriesId, a[3])
+
+            # order by grandparent tag and resource tag
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Expand': False,
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'PatientBirthDate', 'Direction': 'ASC' },
+                    { 'Type': 'DicomTag', 'Key': 'SeriesTime', 'Direction': 'ASC' }
+                ]
+            })
+            # knee PatientBirthDate    = 20080822
+            # brainix PatientBirthDate = 19490301
+            self.assertEqual(4, len(a))
+            self.assertEqual(brainixFlairSeriesId, a[0])
+            self.assertEqual(brainixEpiSeriesId, a[1])
+            self.assertEqual(kneeT1SeriesId, a[2])
+            self.assertEqual(kneeT2SeriesId, a[3])
+
+            # order by great-grandparent tag and resource tag
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Instance',
+                'Expand': True,
+                'Query' : { },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'PatientBirthDate', 'Direction': 'ASC' },
+                    { 'Type': 'DicomTag', 'Key': 'InstanceNumber', 'Direction': 'ASC' },
+                    { 'Type': 'DicomTag', 'Key': 'SeriesTime', 'Direction': 'ASC' }
+                ],
+                'RequestedTags' : ['PatientBirthDate', 'InstanceNumber', 'SeriesTime']
+            })
+            self.assertEqual(12, len(a))
+            for i in range(1, len(a)):
+                self.assertTrue(a[i-1]['RequestedTags']['PatientBirthDate'] <= a[i]['RequestedTags']['PatientBirthDate'])
+                if a[i-1]['RequestedTags']['PatientBirthDate'] == a[i]['RequestedTags']['PatientBirthDate']:
+                    self.assertTrue(a[i-1]['RequestedTags']['InstanceNumber'] <= a[i]['RequestedTags']['InstanceNumber'])
+                    if a[i-1]['RequestedTags']['InstanceNumber'] == a[i]['RequestedTags']['InstanceNumber']:
+                        self.assertTrue(a[i-1]['RequestedTags']['SeriesTime'] <= a[i]['RequestedTags']['SeriesTime'])
+
+            # order by great-grandparent tag and resource tag (2)
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Instance',
+                'Expand': True,
+                'Query' : { },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'InstanceNumber', 'Direction': 'DESC' },
+                    { 'Type': 'DicomTag', 'Key': 'PatientBirthDate', 'Direction': 'ASC' },
+                    { 'Type': 'DicomTag', 'Key': 'SeriesTime', 'Direction': 'ASC' }
+                ],
+                'RequestedTags' : ['InstanceNumber', 'PatientBirthDate', 'SeriesTime']
+            })
+            self.assertEqual(12, len(a))
+            for i in range(1, len(a)):
+                self.assertTrue(a[i-1]['RequestedTags']['InstanceNumber'] >= a[i]['RequestedTags']['InstanceNumber'])
+                if a[i-1]['RequestedTags']['InstanceNumber'] == a[i]['RequestedTags']['InstanceNumber']:
+                    self.assertTrue(a[i-1]['RequestedTags']['PatientBirthDate'] <= a[i]['RequestedTags']['PatientBirthDate'])
+                    if a[i-1]['RequestedTags']['PatientBirthDate'] == a[i]['RequestedTags']['PatientBirthDate']:
+                        self.assertTrue(a[i-1]['RequestedTags']['SeriesTime'] <= a[i]['RequestedTags']['SeriesTime'])
+
+            # order by a resource tag that is missing in one of the resources -> that resource must still be listed
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Expand': False,
+                'Query' : { },
+                'OrderBy' : [
+                    # BodyPartExamined is in Knee but not in Brainix => Brainix comes first because NULL sorts before other values
+                    { 'Type': 'DicomTag', 'Key': 'BodyPartExamined', 'Direction': 'ASC' }
+                ]
+            })
+            self.assertTrue(a[0] == brainixEpiSeriesId or a[0] == brainixFlairSeriesId)
+            self.assertTrue(a[3] == kneeT1SeriesId or a[3] == kneeT2SeriesId)
+
+            # order by metadata
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'Metadata', 'Key': 'my-metadata', 'Direction': 'ASC' }
+                ]
+            })
+            self.assertEqual(brainixEpiSeriesId, a[0])
+            self.assertEqual(brainixFlairSeriesId, a[1])
+            self.assertEqual(kneeT1SeriesId, a[2])
+            self.assertEqual(kneeT2SeriesId, a[3])
+
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'Metadata', 'Key': 'my-metadata', 'Direction': 'DESC' }
+                ]
+            })
+            self.assertEqual(brainixEpiSeriesId, a[3])
+            self.assertEqual(brainixFlairSeriesId, a[2])
+            self.assertEqual(kneeT1SeriesId, a[1])
+            self.assertEqual(kneeT2SeriesId, a[0])
+
+            # combined ordering (DicomTag + Metadata)
+            a = DoPost(_REMOTE, '/tools/find', {
+                'Level' : 'Series',
+                'Query' : { 'SeriesDescription' : '*' },
+                'OrderBy' : [
+                    { 'Type': 'DicomTag', 'Key': 'PatientName', 'Direction': 'ASC' },
+                    { 'Type': 'Metadata', 'Key': 'my-metadata', 'Direction': 'DESC' }
+                ]
+            })
+            self.assertEqual(brainixFlairSeriesId, a[0])
+            self.assertEqual(brainixEpiSeriesId, a[1])
+            self.assertEqual(kneeT2SeriesId, a[2])
+            self.assertEqual(kneeT1SeriesId, a[3])
+
+
+    def test_extended_find_parent(self):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 5) and HasExtendedFind(_REMOTE):  # TODO: remove HasExtendedFind once find-refactoring branch has been merged
+            pass
+            # TODO:
+            # find and order series in a study
+            # ...
+
+    def test_extended_filter_metadata(self):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 5) and HasExtendedFind(_REMOTE):  # TODO: remove HasExtendedFind once find-refactoring branch has been merged
+            pass
+            # TODO:
+            # filter on metadata value
+            # ...
\ No newline at end of file
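For reference, the OrderBy extension to /tools/find exercised by these tests can be called directly over REST. A minimal sketch, assuming an Orthanc >= 1.12.5 built from the find-refactoring branch and listening on http://localhost:8042:

    import requests

    # order all series by the parent study's StudyDate, then by SeriesTime
    r = requests.post("http://localhost:8042/tools/find", json={
        "Level": "Series",
        "Query": {"SeriesDescription": "*"},
        "OrderBy": [
            {"Type": "DicomTag", "Key": "StudyDate", "Direction": "ASC"},
            {"Type": "DicomTag", "Key": "SeriesTime", "Direction": "ASC"},
        ],
    })
    r.raise_for_status()
    print(r.json())  # Orthanc series identifiers, oldest study first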