changeset 830:78c81c902938

integration attach-custom-data->mainline
author Sebastien Jodogne <s.jodogne@gmail.com>
date Fri, 13 Jun 2025 17:02:22 +0200
parents b4ac775869b2 (current diff) ba3295716819 (diff)
children 50097b7179ea
files
diffstat 8 files changed, 458 insertions(+), 17 deletions(-) [+]
line wrap: on
line diff
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/NewTests/AdvancedStorage/test_advanced_storage.py	Fri Jun 13 17:02:22 2025 +0200
@@ -0,0 +1,402 @@
+import unittest
+import time
+import os
+import threading
+import pprint
+import shutil
+from helpers import OrthancTestCase, Helpers, DB
+
+from orthanc_api_client import OrthancApiClient, ChangeType
+from orthanc_api_client.exceptions import HttpError
+from orthanc_api_client import helpers as OrthancHelpers
+from orthanc_api_client import exceptions as orthanc_exceptions
+
+from orthanc_tools import OrthancTestDbPopulator
+
+import pathlib
+import subprocess
+import glob
+here = pathlib.Path(__file__).parent.resolve()
+
+
+class TestAdvancedStorage(OrthancTestCase):
+
+    @classmethod
+    def terminate(cls):
+
+        if Helpers.db == DB.PG:
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+
+    @classmethod
+    def prepare(cls):
+        if Helpers.db == DB.UNSPECIFIED:
+            Helpers.db = DB.PG
+
+        pg_hostname = "localhost"
+        if Helpers.is_docker():
+            pg_hostname = "pg-server"
+
+        if Helpers.db == DB.PG:
+            db_config_key = "PostgreSQL"
+            db_config_content = {
+                "EnableStorage": False,
+                "EnableIndex": True,
+                "Host": pg_hostname,
+                "Port": 5432,
+                "Database": "postgres",
+                "Username": "postgres",
+                "Password": "postgres"
+            }
+            config_name = "advanced-storage-pg"
+            test_name = "AdvancedStoragePG"
+            cls._storage_name = "advanced-storage-pg"
+            network_name = "advanced-storage-pg"
+        else:
+            db_config_key = "NoDatabaseConfig"
+            db_config_content = {}
+            config_name = "advanced-storage"
+            test_name = "AdvancedStorage"
+            cls._storage_name = "advanced-storage"
+            network_name = "advanced-storage"
+
+        cls.clear_storage(storage_name=cls._storage_name)
+
+        # the path seen by the test
+        cls.base_test_storage_path = cls.get_storage_path(storage_name=cls._storage_name) + '/'
+
+        # the path seen by orthanc
+        if Helpers.is_docker():
+            cls.base_orthanc_storage_path = "/var/lib/orthanc/db/"
+        else:
+            cls.base_orthanc_storage_path = cls.base_test_storage_path
+
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-a', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-b', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'adopt-files', ignore_errors=True)
+
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-a').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-b').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'adopt-files').mkdir(parents=True, exist_ok=True)
+
+
+        print(f'-------------- preparing {test_name} tests')
+
+        if Helpers.is_docker():
+            cls.create_docker_network(network_name)
+
+        if Helpers.db == DB.PG:
+            # launch the docker PG server
+            print('--------------- launching PostgreSQL server ------------------')
+
+            # delete previous container if any
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+            pg_cmd = [            
+                "docker", "run", "--rm", 
+                "-p", "5432:5432", 
+                "--name", "pg-server",
+                "--env", "POSTGRES_HOST_AUTH_METHOD=trust"
+                ]
+            
+            if Helpers.is_docker():
+                pg_cmd.extend(["--network", network_name])
+
+            pg_cmd.append("postgres:15")
+
+            cls.pg_service_process = subprocess.Popen(pg_cmd)
+            time.sleep(5)
+
+
+        cls.launch_orthanc_to_prepare_db(
+            config_name=config_name + "-preparation",
+            storage_name=cls._storage_name,
+            config={
+                "AuthenticationEnabled": False,
+                "OverwriteInstances": True,
+                "AdvancedStorage": {
+                    "Enable": False
+                },
+                db_config_key : db_config_content
+            },
+            plugins=Helpers.plugins,
+            docker_network=network_name,
+            enable_verbose=True
+        )
+
+        # upload a study and keep track of data before housekeeper runs
+        cls.instances_ids_before = []
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0001.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0002.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0003.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0004.dcm"))
+
+        cls.kill_orthanc()
+        time.sleep(3)
+
+        config = { 
+            db_config_key : db_config_content,
+            "AuthenticationEnabled": False,
+            "OverwriteInstances": True,
+            "AdvancedStorage": {
+                "Enable": True,
+                "NamingScheme": "{split(StudyDate)}/{StudyInstanceUID} - {PatientID}/{SeriesInstanceUID}/{pad6(InstanceNumber)} - {UUID}{.ext}",
+                "MaxPathLength": 512,
+                "MultipleStorages": {
+                    "Storages" : {
+                        "a" : cls.base_orthanc_storage_path + "storage-a",
+                        "b" : cls.base_orthanc_storage_path + "storage-b"
+                    },
+                    "CurrentWriteStorage": "b"
+                },
+                "OtherAttachmentsPrefix": "other-attachments",
+                "Indexer" : {
+                    "Enable": True,
+                    "Folders": [
+                        cls.base_orthanc_storage_path + "indexed-files-a/",
+                        cls.base_orthanc_storage_path + "indexed-files-b/"
+                    ],
+                    "Interval": 1
+                },
+                "DelayedDeletion": {
+                    "Enable": True
+                }
+            },
+            "StableAge": 1
+        }
+
+        config_path = cls.generate_configuration(
+            config_name=f"{test_name}",
+            storage_name=cls._storage_name,
+            config=config,
+            plugins=Helpers.plugins
+        )
+
+        if Helpers.break_after_preparation:
+            print(f"++++ It is now time to start your Orthanc under tests with configuration file '{config_path}' +++++")
+            input("Press Enter to continue")
+        else:
+            cls.launch_orthanc_under_tests(
+                config_name=f"{test_name}",
+                storage_name=cls._storage_name,
+                config=config,
+                plugins=Helpers.plugins,
+                docker_network=network_name,
+                enable_verbose=True
+            )
+
+        cls.o = OrthancApiClient(cls.o._root_url)
+        cls.o.wait_started()
+
+    def check_file_exists(self, orthanc_path):
+        if Helpers.is_docker():
+            orthanc_path = orthanc_path.replace("/var/lib/orthanc/db", self.get_storage_path(self._storage_name))
+        return os.path.exists(orthanc_path)
+
+    def test_can_read_files_saved_without_plugin(self):
+        info0 = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[0]}/attachments/dicom/info")
+        if not Helpers.is_docker():
+            self.assertTrue(info0['Path'].startswith(self.get_storage_path(self._storage_name)))
+        # pprint.pprint(info0)
+        self.assertFalse(info0['Path'].endswith('.dcm'))
+        self.assertFalse(info0['IsAdopted'])
+        self.assertFalse('IsIndexed' in info0 and info0['IsIndexed'])
+
+        info1 = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[1]}/attachments/dicom/info")
+
+        # check if we can move the first instance
+        # move it to storage A
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [self.instances_ids_before[0]],
+                        'TargetStorageId' : 'a'
+                    })
+        
+        # check its path after the move
+        info_after_move = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[0]}/attachments/dicom/info")
+        self.assertIn('storage-a', info_after_move['Path'])
+        self.assertEqual("a", info_after_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info0['Path']))
+        self.assertFalse(self.check_file_exists(info0['Path']))
+
+        # now delete instance 0 (the one that has been moved)
+        self.o.instances.delete(orthanc_id=self.instances_ids_before[0])
+        
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_after_move['Path']))
+        self.assertFalse(self.check_file_exists(info_after_move['Path']))
+
+        # now delete the instance 1 (that has NOT been moved) 
+        self.o.instances.delete(orthanc_id=self.instances_ids_before[1])
+        
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info1['Path']))
+        self.assertFalse(self.check_file_exists(info1['Path']))
+
+
+    def test_basic(self):
+        # upload a single file
+        uploaded_instances_ids = self.o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")
+
+        # check its path
+        info = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        
+        self.assertIn('storage-b/2007/01/01/1.2.840.113619.2.176.2025.1499492.7391.1171285944.390 - ozp00SjY2xG/1.2.840.113619.2.176.2025.1499492.7391.1171285944.388/000001 - ', info['Path'])
+        # self.assertTrue(os.path.exists(info['Path']))
+        self.assertTrue(self.check_file_exists(info['Path']))
+        self.assertTrue(info['Path'].endswith(".dcm"))
+        self.assertFalse(info['IsAdopted'])
+        self.assertFalse(info['IsIndexed'])
+        self.assertEqual("b", info['StorageId'])
+
+    def has_no_more_pending_deletion_files(self):
+        status = self.o.get_json("/plugins/advanced-storage/status")
+        return status['DelayedDeletionIsActive'] and status['FilesPendingDeletion'] == 0
+
+    def wait_until_no_more_pending_deletion_files(self):
+        time.sleep(1)
+        OrthancHelpers.wait_until(lambda: self.has_no_more_pending_deletion_files(), timeout=10, polling_interval=1)
+
+    def test_move_storage(self):
+
+
+        # upload a single file
+        uploaded_instances_ids = self.o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")
+
+        # check its path
+        info_before_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_before_move['Path'])
+        self.assertEqual("b", info_before_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_before_move['Path']))
+        self.assertTrue(self.check_file_exists(info_before_move['Path']))
+
+        # move it to storage A
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'a'
+                    })
+        
+        # check its path after the move
+        info_after_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-a', info_after_move['Path'])
+        self.assertEqual("a", info_after_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_before_move['Path']))
+        self.assertFalse(self.check_file_exists(info_before_move['Path']))
+
+        # move it back to storage B
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'b'
+                    })
+        
+        # check its path after the move
+        info_after_move2 = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_after_move2['Path'])
+        self.assertEqual("b", info_after_move2['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move2['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move2['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_after_move['Path']))
+        self.assertFalse(self.check_file_exists(info_after_move['Path']))
+
+    def test_adopt_abandon(self):
+
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0001.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0002.dcm", self.base_test_storage_path + "adopt-files/")
+
+
+        # adopt a file
+        r1 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0001.dcm"
+                        }).json()
+        r2 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm"
+                        }).json()
+
+        # pprint.pprint(r1)
+
+        # check its path
+        info1 = self.o.get_json(endpoint=f"/instances/{r1['InstanceId']}/attachments/dicom/info")
+        self.assertNotIn('storage-b', info1['Path'])
+        self.assertNotIn('StorageId', info1)
+        self.assertTrue(info1['IsAdopted'])
+        self.assertFalse(info1['IsIndexed'])
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+        self.assertEqual(r1['AttachmentUuid'], info1['Uuid'])
+
+        info2 = self.o.get_json(endpoint=f"/instances/{r2['InstanceId']}/attachments/dicom/info")
+
+        # try to move an adopted file -> it should fail
+        with self.assertRaises(orthanc_exceptions.HttpError) as ctx:
+            self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                        json={
+                            'Resources': [r1['InstanceId']],
+                            'TargetStorageId' : 'a'
+                        })
+
+        # delete an adopted file -> the file shall not be removed
+        self.o.instances.delete(orthanc_id=r1['InstanceId'])
+        self.assertNotIn(r1['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+
+        # abandon an adopted file -> the file shall not be removed (it shall be equivalent to a delete)
+        self.o.post(endpoint="/plugins/advanced-storage/abandon-instance",
+                    json={
+                        "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm"
+                    })
+        self.assertNotIn(r2['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(self.check_file_exists(info2['Path']))
+
+    def test_indexer(self):
+        # add 2 files to the 2 indexed folders
+        shutil.copy(here / "../../Database/Comunix/Ct/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-a/")
+        shutil.copy(here / "../../Database/Comunix/Pet/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-b/")
+
+        # wait for the files to be indexed
+        time.sleep(5)
+
+        # check that the study has been indexed
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(2, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+        
+        instances_ids = self.o.studies.get_instances_ids(studies[0].orthanc_id)
+        info1 = self.o.get_json(endpoint=f"/instances/{instances_ids[0]}/attachments/dicom/info")
+        info2 = self.o.get_json(endpoint=f"/instances/{instances_ids[1]}/attachments/dicom/info")
+
+        self.assertTrue(info1['IsIndexed'])
+        self.assertTrue(info1['IsAdopted'])
+
+        # remove one of the files from the indexed folders -> it shall disappear from Orthanc
+        os.remove(self.base_test_storage_path + "indexed-files-b/IM-0001-0001.dcm")
+
+        time.sleep(5)
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(1, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+
+        # delete the other file from the Orthanc API -> the file shall not be deleted since it is not owned by Orthanc
+        # and it shall not be indexed anymore ...
+
+        self.o.studies.delete(orthanc_id=studies[0].orthanc_id)
+        time.sleep(5)
+        
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(0, len(studies))
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(os.path.exists(self.base_test_storage_path + "indexed-files-a/IM-0001-0001.dcm"))
+
--- a/NewTests/Concurrency/docker-compose-transfers-concurrency.yml	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/Concurrency/docker-compose-transfers-concurrency.yml	Fri Jun 13 17:02:22 2025 +0200
@@ -1,4 +1,3 @@
-version: "3"
 services:
 
   orthanc-pg-a:
--- a/NewTests/PostgresUpgrades/docker-compose.yml	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/PostgresUpgrades/docker-compose.yml	Fri Jun 13 17:02:22 2025 +0200
@@ -17,7 +17,7 @@
 
   # Orthanc previous version
   orthanc-pg-15-previous-revision:
-    image: orthancteam/orthanc:25.1.1
+    image: orthancteam/orthanc:25.2.0
     container_name: orthanc-pg-15-previous-revision
     depends_on: [pg-15]
     restart: unless-stopped
@@ -30,7 +30,7 @@
 
   # Orthanc previous version to run the integration tests
   orthanc-pg-15-previous-revision-for-integ-tests:
-    image: orthancteam/orthanc:25.1.1
+    image: orthancteam/orthanc:25.2.0
     container_name: orthanc-pg-15-previous-revision-for-integ-tests
     depends_on: [pg-15]
     restart: unless-stopped
--- a/NewTests/PostgresUpgrades/downgrade.sh	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/PostgresUpgrades/downgrade.sh	Fri Jun 13 17:02:22 2025 +0200
@@ -4,8 +4,12 @@
 
 apt-get update && apt-get install -y wget mercurial
 hg clone https://orthanc.uclouvain.be/hg/orthanc-databases
-pushd /scripts/orthanc-databases/
-hg update -r default
+pushd orthanc-databases
+
+# TODO: replace attach-custom-data with the plugin version number or "default"!
+hg update -r attach-custom-data
+
+psql -U postgres -f /scripts/orthanc-databases/PostgreSQL/Plugins/SQL/Downgrades/Rev5ToRev4.sql
 psql -U postgres -f /scripts/orthanc-databases/PostgreSQL/Plugins/SQL/Downgrades/Rev4ToRev3.sql
 
 # if you want to test a downgrade procedure, you may use this code ...
--- a/NewTests/README	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/README	Fri Jun 13 17:02:22 2025 +0200
@@ -220,4 +220,22 @@
 with Docker:
 
 python3 NewTests/main.py --pattern=CGet.test_cget.TestCGet.* \
-                         --orthanc_under_tests_docker_image=orthancteam/orthanc-pre-release:2025.01.20
\ No newline at end of file
+                         --orthanc_under_tests_docker_image=orthancteam/orthanc-pre-release:2025.01.20
+
+AdvancedStorage:
+---------------
+
+Run the AdvancedStorage tests with your locally built version and break before execution to allow you to start your debugger.
+
+python3 NewTests/main.py --pattern=AdvancedStorage.test_advanced_storage.TestAdvancedStorage.* \
+                         --orthanc_under_tests_exe=/home/alain/o/build/orthanc-bis/Orthanc \
+                         --orthanc_under_tests_http_port=8043 \
+                         --plugin=/home/alain/o/build/orthanc-dicomweb/libOrthancDicomWeb.so \
+                         --plugin=/home/alain/o/build/advanced-storage/libAdvancedStorage.so \
+                         --break_before_preparation
+
+with Docker:
+
+python3 NewTests/main.py --pattern=AdvancedStorage.test_advanced_storage.TestAdvancedStorage.* \
+                         --orthanc_under_tests_docker_image=orthancteam/orthanc:current \
+                         --orthanc_under_tests_http_port=8043
--- a/NewTests/helpers.py	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/helpers.py	Fri Jun 13 17:02:22 2025 +0200
@@ -8,6 +8,7 @@
 import glob
 import time
 from threading import Thread
+from enum import StrEnum
 
 
 import pathlib
@@ -45,6 +46,11 @@
         time.sleep(1)
 
 
+class DB(StrEnum):
+    SQLITE = 'sqlite'
+    PG = 'pg'
+    UNSPECIFIED = 'unspecified'
+
 
 class Helpers:
 
@@ -54,11 +60,13 @@
     orthanc_under_tests_exe: str = None
     orthanc_previous_version_exe: str = None
     orthanc_under_tests_docker_image: str = None
+    db: DB = DB.UNSPECIFIED
     skip_preparation: bool = False
     break_after_preparation: bool = False
     break_before_preparation: bool = False
     plugins: typing.List[str] = []
 
+
     @classmethod
     def get_orthanc_url(cls):
         return f"http://{cls.orthanc_under_tests_hostname}:{cls.orthanc_under_tests_http_port}"
@@ -167,22 +175,29 @@
         if Helpers.is_exe():
 
             # clear the directory but keep it !
-            for root, dirs, files in os.walk(storage_path):
-                for f in files:
-                    os.unlink(os.path.join(root, f))
-                for d in dirs:
-                    shutil.rmtree(os.path.join(root, d))
-                    shutil.rmtree(storage_path, ignore_errors=True)
+            shutil.rmtree(storage_path, ignore_errors=True)
+            pathlib.Path(storage_path).mkdir(parents=True, exist_ok=True)
+
+            # for root, dirs, files in os.walk(storage_path):
+            #     for f in files:
+            #         os.unlink(os.path.join(root, f))
+            #     for d in dirs:
+            #         shutil.rmtree(os.path.join(root, d))
+            #         shutil.rmtree(storage_path, ignore_errors=True)
         else:
+            # create the directory with user ownership before docker creates it 
+            pathlib.Path(storage_path).mkdir(parents=True, exist_ok=True)
+
+            # clear the directory (but you need to be root from the container !)
             cmd = [
                     "docker", "run", "--rm", 
                     "-v", f"{storage_path}:/var/lib/orthanc/db/",
                     "--name", "storage-cleanup",
                     "debian:12-slim",
-                    "rm", "-rf", "/var/lib/orthanc/db/*"
+                    "bash", "-c", "rm -rf /var/lib/orthanc/db/*"
                 ]
+            subprocess.run(cmd, check=True)
 
-            subprocess.run(cmd, check=True)
 
     @classmethod
     def is_storage_empty(cls, storage_name: str):
@@ -196,7 +211,7 @@
             subprocess.run(["docker", "network", "create", network])
 
     @classmethod
-    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = [], docker_network: str = None):
+    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = [], docker_network: str = None, enable_verbose: bool = False):
         if config_name and storage_name and config:
             # generate the configuration file
             config_path = cls.generate_configuration(
@@ -220,7 +235,8 @@
                 storage_name=storage_name,
                 config_name=config_name,
                 config_path=config_path,
-                network=docker_network
+                network=docker_network,
+                enable_verbose=enable_verbose
             )
         else:
             raise RuntimeError("Invalid configuration, can not launch Orthanc")
--- a/NewTests/main.py	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/main.py	Fri Jun 13 17:02:22 2025 +0200
@@ -29,6 +29,7 @@
     parser.add_argument('--skip_preparation', action='store_true', help="if this is a multi stage tests with preparations, skip the preparation")
     parser.add_argument('--break_after_preparation', action='store_true', help="if this is a multi stage tests with preparations, pause after the preparation (such that you can start your own orthanc-under-tests in your debugger)")
     parser.add_argument('--break_before_preparation', action='store_true', help="if this is a multi stage tests with preparations, pause before the preparation (such that you can start your own orthanc-under-tests in your debugger)")
+    parser.add_argument('--db', type=str, default='unspecified', help="the DB engine to use")
     parser.add_argument('-p', '--plugin', dest='plugins', action='append', type=str, help='path to a plugin to add to configuration')
 
     args = parser.parse_args()
@@ -40,6 +41,7 @@
     Helpers.orthanc_under_tests_http_port = args.orthanc_under_tests_http_port
     Helpers.orthanc_under_tests_dicom_port = args.orthanc_under_tests_dicom_port
     Helpers.plugins = args.plugins
+    Helpers.db = args.db
 
     Helpers.orthanc_under_tests_exe = args.orthanc_under_tests_exe
     Helpers.orthanc_under_tests_docker_image = args.orthanc_under_tests_docker_image
--- a/NewTests/requirements.txt	Wed Jun 04 16:28:27 2025 +0200
+++ b/NewTests/requirements.txt	Fri Jun 13 17:02:22 2025 +0200
@@ -1,3 +1,3 @@
 orthanc-api-client>=0.18.5
-orthanc-tools>=0.13.0
+orthanc-tools>=0.16.5
 uvicorn
\ No newline at end of file