changeset 856:10f52fc26772 pixel-anon

merged default -> pixel-anon
author Alain Mazy <am@orthanc.team>
date Tue, 04 Nov 2025 15:52:51 +0100
parents bfbadbfae1e2 (current diff) ab360e15b792 (diff)
children 557977f25847
files
diffstat 23 files changed, 1460 insertions(+), 251 deletions(-) [+]
line wrap: on
line diff
--- a/.hgtags	Thu Apr 10 16:33:10 2025 +0200
+++ b/.hgtags	Tue Nov 04 15:52:51 2025 +0100
@@ -48,3 +48,5 @@
 847b3c6b360b9b0daeab327133703c60e14e51f0 Orthanc-1.12.5
 287aae544b3133f6cecdf768f5a09dacbd75cf91 Orthanc-1.12.6
 2eca398d9676e2378343c48769a4b3938ba96005 Orthanc-1.12.7
+50097b7179ea7eb8afa339abd2f77e233183ace2 Orthanc-1.12.8
+467d73ff9c02807f8f7f3c029e209fb445f6ef8b Orthanc-1.12.9
--- a/CITATION.cff	Thu Apr 10 16:33:10 2025 +0200
+++ b/CITATION.cff	Tue Nov 04 15:52:51 2025 +0100
@@ -10,5 +10,5 @@
 doi: "10.1007/s10278-018-0082-y"
 license: "GPL-3.0-or-later"
 repository-code: "https://orthanc.uclouvain.be/hg/orthanc/"
-version: 1.12.7
-date-released: 2025-04-07
+version: 1.12.9
+date-released: 2025-08-11
Binary file Database/Encodings/ISO_IR13.dcm has changed
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/NewTests/AdvancedStorage/test_advanced_storage.py	Tue Nov 04 15:52:51 2025 +0100
@@ -0,0 +1,467 @@
+import unittest
+import time
+import os
+import threading
+import pprint
+import shutil
+from helpers import OrthancTestCase, Helpers, DB
+
+from orthanc_api_client import OrthancApiClient, ChangeType
+from orthanc_api_client.exceptions import HttpError
+from orthanc_api_client import helpers as OrthancHelpers
+from orthanc_api_client import exceptions as orthanc_exceptions
+
+from orthanc_tools import OrthancTestDbPopulator
+
+import pathlib
+import subprocess
+import glob
+here = pathlib.Path(__file__).parent.resolve()
+
+
+class TestAdvancedStorage(OrthancTestCase):
+
+    @classmethod
+    def terminate(cls):
+
+        if Helpers.db == DB.PG:
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+
+    @classmethod
+    def prepare(cls):
+        if Helpers.db == DB.UNSPECIFIED:
+            Helpers.db = DB.PG
+
+        pg_hostname = "localhost"
+        if Helpers.is_docker():
+            pg_hostname = "pg-server"
+
+        if Helpers.db == DB.PG:
+            db_config_key = "PostgreSQL"
+            db_config_content = {
+                "EnableStorage": False,
+                "EnableIndex": True,
+                "Host": pg_hostname,
+                "Port": 5432,
+                "Database": "postgres",
+                "Username": "postgres",
+                "Password": "postgres"
+            }
+            config_name = "advanced-storage-pg"
+            test_name = "AdvancedStoragePG"
+            cls._storage_name = "advanced-storage-pg"
+            network_name = "advanced-storage-pg"
+        else:
+            db_config_key = "NoDatabaseConfig"
+            db_config_content = {}
+            config_name = "advanced-storage"
+            test_name = "AdvancedStorage"
+            cls._storage_name = "advanced-storage"
+            network_name = "advanced-storage"
+
+        cls.clear_storage(storage_name=cls._storage_name)
+
+        # the path seen by the test
+        cls.base_test_storage_path = cls.get_storage_path(storage_name=cls._storage_name) + '/'
+
+        # the path seen by orthanc
+        if Helpers.is_docker():
+            cls.base_orthanc_storage_path = "/var/lib/orthanc/db/"
+        else:
+            cls.base_orthanc_storage_path = cls.base_test_storage_path
+
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-a', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-b', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'adopt-files', ignore_errors=True)
+
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-a').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-b').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'adopt-files').mkdir(parents=True, exist_ok=True)
+
+
+        print(f'-------------- preparing {test_name} tests')
+
+        if Helpers.is_docker():
+            cls.create_docker_network(network_name)
+
+        if Helpers.db == DB.PG:
+            # launch the docker PG server
+            print('--------------- launching PostgreSQL server ------------------')
+
+            # delete previous container if any
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+            pg_cmd = [            
+                "docker", "run", "--rm", 
+                "-p", "5432:5432", 
+                "--name", "pg-server",
+                "--env", "POSTGRES_HOST_AUTH_METHOD=trust"
+                ]
+            
+            if Helpers.is_docker():
+                pg_cmd.extend(["--network", network_name])
+
+            pg_cmd.append("postgres:15")
+
+            cls.pg_service_process = subprocess.Popen(pg_cmd)
+            time.sleep(5)
+
+
+        cls.launch_orthanc_to_prepare_db(
+            config_name=config_name + "-preparation",
+            storage_name=cls._storage_name,
+            config={
+                "AuthenticationEnabled": False,
+                "OverwriteInstances": True,
+                "AdvancedStorage": {
+                    "Enable": False
+                },
+                db_config_key : db_config_content
+            },
+            plugins=Helpers.plugins,
+            docker_network=network_name,
+            enable_verbose=True
+        )
+
+        # upload a study and keep track of data before housekeeper runs
+        cls.instances_ids_before = []
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0001.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0002.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0003.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0004.dcm"))
+
+        cls.kill_orthanc()
+        time.sleep(3)
+
+        config = { 
+            db_config_key : db_config_content,
+            "AuthenticationEnabled": False,
+            "OverwriteInstances": True,
+            "MaximumStorageCacheSize": 0,  # disable the cache to force reading from disk every time
+            "AdvancedStorage": {
+                "Enable": True,
+                "NamingScheme": "{split(StudyDate)}/{StudyInstanceUID} - {PatientID}/{SeriesInstanceUID}/{pad6(InstanceNumber)} - {UUID}{.ext}",
+                "MaxPathLength": 512,
+                "MultipleStorages": {
+                    "Storages" : {
+                        "a" : cls.base_orthanc_storage_path + "storage-a",
+                        "b" : cls.base_orthanc_storage_path + "storage-b"
+                    },
+                    "CurrentWriteStorage": "b"
+                },
+                "OtherAttachmentsPrefix": "other-attachments",
+                "Indexer" : {
+                    "Enable": True,
+                    "Folders": [
+                        cls.base_orthanc_storage_path + "indexed-files-a/",
+                        cls.base_orthanc_storage_path + "indexed-files-b/"
+                    ],
+                    "Interval": 1,
+                    "TakeOwnership": False
+                },
+                "DelayedDeletion": {
+                    "Enable": True
+                }
+            },
+            "StableAge": 1
+        }
+
+        config_path = cls.generate_configuration(
+            config_name=f"{test_name}",
+            storage_name=cls._storage_name,
+            config=config,
+            plugins=Helpers.plugins
+        )
+
+        if Helpers.break_after_preparation:
+            print(f"++++ It is now time to start your Orthanc under tests with configuration file '{config_path}' +++++")
+            input("Press Enter to continue")
+        else:
+            cls.launch_orthanc_under_tests(
+                config_name=f"{test_name}",
+                storage_name=cls._storage_name,
+                config=config,
+                plugins=Helpers.plugins,
+                docker_network=network_name,
+                enable_verbose=True
+            )
+
+        cls.o = OrthancApiClient(cls.o._root_url)
+        cls.o.wait_started()
+
+    def check_file_exists(self, orthanc_path):
+        if Helpers.is_docker():
+            orthanc_path = orthanc_path.replace("/var/lib/orthanc/db", self.get_storage_path(self._storage_name))
+        return os.path.exists(orthanc_path)
+
+    def test_can_read_files_saved_without_plugin(self):
+        info0 = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[0]}/attachments/dicom/info")
+        if not Helpers.is_docker():
+            self.assertTrue(info0['Path'].startswith(self.get_storage_path(self._storage_name)))
+        # pprint.pprint(info0)
+        self.assertFalse(info0['Path'].endswith('.dcm'))
+        self.assertTrue(info0['IsOwnedByOrthanc'])
+        self.assertFalse('IsIndexed' in info0 and info0['IsIndexed'])
+
+        info1 = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[1]}/attachments/dicom/info")
+
+        # check if we can move the first instance
+        # move it to storage A
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [self.instances_ids_before[0]],
+                        'TargetStorageId' : 'a'
+                    })
+        
+        # check its path after the move
+        info_after_move = self.o.get_json(endpoint=f"/instances/{self.instances_ids_before[0]}/attachments/dicom/info")
+        self.assertIn('storage-a', info_after_move['Path'])
+        self.assertEqual("a", info_after_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info0['Path']))
+        self.assertFalse(self.check_file_exists(info0['Path']))
+
+        # now delete the instance 0 (the one that has been moved) 
+        self.o.instances.delete(orthanc_id=self.instances_ids_before[0])
+        
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_after_move['Path']))
+        self.assertFalse(self.check_file_exists(info_after_move['Path']))
+
+        # now delete the instance 1 (that has NOT been moved) 
+        self.o.instances.delete(orthanc_id=self.instances_ids_before[1])
+        
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info1['Path']))
+        self.assertFalse(self.check_file_exists(info1['Path']))
+
+
+    def test_basic(self):
+        # upload a single file
+        uploaded_instances_ids = self.o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")
+
+        # check its path
+        info = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        
+        self.assertIn('storage-b/2007/01/01/1.2.840.113619.2.176.2025.1499492.7391.1171285944.390 - ozp00SjY2xG/1.2.840.113619.2.176.2025.1499492.7391.1171285944.388/000001 - ', info['Path'])
+        # self.assertTrue(os.path.exists(info['Path']))
+        self.assertTrue(self.check_file_exists(info['Path']))
+        self.assertTrue(info['Path'].endswith(".dcm"))
+        self.assertTrue(info['IsOwnedByOrthanc'])
+        self.assertFalse(info['IsIndexed'])
+        self.assertEqual("b", info['StorageId'])
+
+    def has_no_more_pending_deletion_files(self):
+        status = self.o.get_json("/plugins/advanced-storage/status")
+        return status['DelayedDeletionIsActive'] and status['FilesPendingDeletion'] == 0
+
+    def wait_until_no_more_pending_deletion_files(self):
+        time.sleep(1)
+        OrthancHelpers.wait_until(lambda: self.has_no_more_pending_deletion_files(), timeout=10, polling_interval=1)
+
+    def test_move_storage(self):
+        # upload a single file
+        uploaded_instances_ids = self.o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")
+
+        # check its path
+        info_before_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_before_move['Path'])
+        self.assertEqual("b", info_before_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_before_move['Path']))
+        self.assertTrue(self.check_file_exists(info_before_move['Path']))
+
+        # move it to storage A
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'a'
+                    })
+        
+        # check its path after the move
+        info_after_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-a', info_after_move['Path'])
+        self.assertEqual("a", info_after_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_before_move['Path']))
+        self.assertFalse(self.check_file_exists(info_before_move['Path']))
+
+        # move it back to storage B
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'b'
+                    })
+        
+        # check its path after the move
+        info_after_move2 = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_after_move2['Path'])
+        self.assertEqual("b", info_after_move2['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move2['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move2['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_after_move['Path']))
+        self.assertFalse(self.check_file_exists(info_after_move['Path']))
+
+    def test_adopt_abandon(self):
+
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0001.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0002.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0003.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0004.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0005.dcm", self.base_test_storage_path + "adopt-files/")
+
+        # adopt a file
+        r1 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0001.dcm",
+                            "TakeOwnership": False
+                        }).json()
+        r2 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm",
+                            "TakeOwnership": False
+                        }).json()
+        r3 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0003.dcm",
+                            "TakeOwnership": True
+                        }).json()
+        r4 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0004.dcm",
+                            "TakeOwnership": True
+                        }).json()
+        r5 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0005.dcm",
+                            "TakeOwnership": True
+                        }).json()
+
+        # pprint.pprint(r1)
+
+        # check its path
+        info1 = self.o.get_json(endpoint=f"/instances/{r1['InstanceId']}/attachments/dicom/info")
+        self.assertNotIn('storage-b', info1['Path'])
+        self.assertNotIn('StorageId', info1)
+        self.assertFalse(info1['IsOwnedByOrthanc'])
+        self.assertFalse(info1['IsIndexed'])
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+        self.assertEqual(r1['AttachmentUuid'], info1['Uuid'])
+
+        info2 = self.o.get_json(endpoint=f"/instances/{r2['InstanceId']}/attachments/dicom/info")
+
+        # try to move an adopted file that does not belong to Orthanc -> it should fail
+        with self.assertRaises(orthanc_exceptions.HttpError) as ctx:
+            self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                        json={
+                            'Resources': [r1['InstanceId']],
+                            'TargetStorageId' : 'a'
+                        })
+
+        # delete an adopted file that does not belong to Orthanc -> the file shall not be removed
+        self.o.instances.delete(orthanc_id=r1['InstanceId'])
+        self.assertNotIn(r1['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+
+        # abandon an adopted file that does not belong to Orthanc -> the file shall not be removed (it shall be equivalent to a delete)
+        self.o.post(endpoint="/plugins/advanced-storage/abandon-instance",
+                    json={
+                        "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm"
+                    })
+        self.assertNotIn(r2['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(self.check_file_exists(info2['Path']))
+
+        info4 = self.o.get_json(endpoint=f"/instances/{r4['InstanceId']}/attachments/dicom/info")
+        self.assertTrue(info4['IsOwnedByOrthanc'])
+        self.assertFalse(info4['IsIndexed'])  # the file is not considered as indexed since it is owned by Orthanc
+        # abandon an adopted file that belongs to Orthanc -> the file shall be deleted
+        self.o.post(endpoint="/plugins/advanced-storage/abandon-instance",
+                    json={
+                        "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0004.dcm"
+                    })
+        self.wait_until_no_more_pending_deletion_files()
+        self.assertFalse(self.check_file_exists(info4['Path']))
+
+        # delete an adopted file that belongs to Orthanc -> the file shall be removed
+        info5 = self.o.get_json(endpoint=f"/instances/{r5['InstanceId']}/attachments/dicom/info")
+        self.o.instances.delete(orthanc_id=r5['InstanceId'])
+        self.assertNotIn(r5['InstanceId'], self.o.instances.get_all_ids())
+        self.wait_until_no_more_pending_deletion_files()
+        self.assertFalse(self.check_file_exists(info5['Path']))
+
+        # try to move an adopted file that belongs to Orthanc -> it should not work.
+        with self.assertRaises(orthanc_exceptions.HttpError) as ctx:
+            self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                        json={
+                            'Resources': [r3['InstanceId']],
+                            'TargetStorageId' : 'a'
+                        })
+
+        # try to reconstruct an adopted file that belongs to Orthanc -> it shall move the file to the Orthanc Storage
+        self.o.post(endpoint=f"/instances/{r3['InstanceId']}/reconstruct",
+                    json={
+                        'ReconstructFiles': True
+                    })
+
+        info3 = self.o.get_json(endpoint=f"/instances/{r3['InstanceId']}/attachments/dicom/info")
+        self.assertIn('5Yp0E', info3['Path'])
+        self.assertEqual('b', info3['StorageId'])
+        self.assertTrue(self.check_file_exists(info3['Path']))
+
+        # after the reconstruction, the file is not considered as adopted anymore
+        self.assertTrue(info3['IsOwnedByOrthanc'])
+        self.assertFalse(info3['IsIndexed'])
+
+
+    def test_indexer(self):
+        # add 2 files to the 2 indexed folders
+        shutil.copy(here / "../../Database/Comunix/Ct/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-a/")
+        shutil.copy(here / "../../Database/Comunix/Pet/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-b/")
+
+        # wait for the files to be indexed
+        time.sleep(5)
+
+        # check that the study has been indexed
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(2, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+        
+        instances_ids = self.o.studies.get_instances_ids(studies[0].orthanc_id)
+        info1 = self.o.get_json(endpoint=f"/instances/{instances_ids[0]}/attachments/dicom/info")
+        info2 = self.o.get_json(endpoint=f"/instances/{instances_ids[1]}/attachments/dicom/info")
+
+        self.assertFalse(info1['IsOwnedByOrthanc'])
+        self.assertFalse(info2['IsOwnedByOrthanc'])
+
+        # make sure we can read the files from disk (bug in 0.2.0)
+        self.o.get_binary(endpoint=f"/instances/{instances_ids[0]}/file")
+        self.o.get_binary(endpoint=f"/instances/{instances_ids[1]}/file")
+
+        # remove one of the file from the indexed folders -> it shall disappear from Orthanc
+        os.remove(self.base_test_storage_path + "indexed-files-b/IM-0001-0001.dcm")
+
+        time.sleep(5)
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(1, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+
+        # delete the other file from the Orthanc API -> the file shall not be deleted since it is not owned by Orthanc
+        # and it shall not be indexed anymore ...
+
+        self.o.studies.delete(orthanc_id=studies[0].orthanc_id)
+        time.sleep(5)
+        
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(0, len(studies))
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(os.path.exists(self.base_test_storage_path + "indexed-files-a/IM-0001-0001.dcm"))
+
--- /dev/null	Thu Jan 01 00:00:00 1970 +0000
+++ b/NewTests/AdvancedStorage/test_advanced_storage_default_naming_scheme.py	Tue Nov 04 15:52:51 2025 +0100
@@ -0,0 +1,405 @@
+import unittest
+import time
+import os
+import threading
+import pprint
+import shutil
+from helpers import OrthancTestCase, Helpers, DB
+
+from orthanc_api_client import OrthancApiClient, ChangeType
+from orthanc_api_client.exceptions import HttpError
+from orthanc_api_client import helpers as OrthancHelpers
+from orthanc_api_client import exceptions as orthanc_exceptions
+
+from orthanc_tools import OrthancTestDbPopulator
+
+import pathlib
+import subprocess
+import glob
+here = pathlib.Path(__file__).parent.resolve()
+
+
+class TestAdvancedStorageDefaultNamingScheme(OrthancTestCase):
+
+    @classmethod
+    def terminate(cls):
+
+        if Helpers.db == DB.PG:
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+
+    @classmethod
+    def prepare(cls):
+        if Helpers.db == DB.UNSPECIFIED:
+            Helpers.db = DB.PG
+
+        pg_hostname = "localhost"
+        if Helpers.is_docker():
+            pg_hostname = "pg-server"
+
+        if Helpers.db == DB.PG:
+            db_config_key = "PostgreSQL"
+            db_config_content = {
+                "EnableStorage": False,
+                "EnableIndex": True,
+                "Host": pg_hostname,
+                "Port": 5432,
+                "Database": "postgres",
+                "Username": "postgres",
+                "Password": "postgres"
+            }
+            config_name = "advanced-storage-pg"
+            test_name = "AdvancedStoragePG"
+            cls._storage_name = "advanced-storage-pg"
+            network_name = "advanced-storage-pg"
+        else:
+            db_config_key = "NoDatabaseConfig"
+            db_config_content = {}
+            config_name = "advanced-storage"
+            test_name = "AdvancedStorage"
+            cls._storage_name = "advanced-storage"
+            network_name = "advanced-storage"
+
+        cls.clear_storage(storage_name=cls._storage_name)
+
+        # the path seen by the test
+        cls.base_test_storage_path = cls.get_storage_path(storage_name=cls._storage_name) + '/'
+
+        # the path seen by orthanc
+        if Helpers.is_docker():
+            cls.base_orthanc_storage_path = "/var/lib/orthanc/db/"
+        else:
+            cls.base_orthanc_storage_path = cls.base_test_storage_path
+
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-a', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'indexed-files-b', ignore_errors=True)
+        shutil.rmtree(cls.base_test_storage_path + 'adopt-files', ignore_errors=True)
+
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-a').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'indexed-files-b').mkdir(parents=True, exist_ok=True)
+        pathlib.Path(cls.base_test_storage_path + 'adopt-files').mkdir(parents=True, exist_ok=True)
+
+
+        print(f'-------------- preparing {test_name} tests')
+
+        if Helpers.is_docker():
+            cls.create_docker_network(network_name)
+
+        if Helpers.db == DB.PG:
+            # launch the docker PG server
+            print('--------------- launching PostgreSQL server ------------------')
+
+            # delete previous container if any
+            subprocess.run(["docker", "rm", "-f", "pg-server"])
+
+            pg_cmd = [            
+                "docker", "run", "--rm", 
+                "-p", "5432:5432", 
+                "--name", "pg-server",
+                "--env", "POSTGRES_HOST_AUTH_METHOD=trust"
+                ]
+            
+            if Helpers.is_docker():
+                pg_cmd.extend(["--network", network_name])
+
+            pg_cmd.append("postgres:15")
+
+            cls.pg_service_process = subprocess.Popen(pg_cmd)
+            time.sleep(5)
+
+
+        cls.launch_orthanc_to_prepare_db(
+            config_name=config_name + "-preparation",
+            storage_name=cls._storage_name,
+            config={
+                "AuthenticationEnabled": False,
+                "OverwriteInstances": True,
+                "AdvancedStorage": {
+                    "Enable": False
+                },
+                db_config_key : db_config_content
+            },
+            plugins=Helpers.plugins,
+            docker_network=network_name,
+            enable_verbose=True
+        )
+
+        # upload a study and keep track of data before housekeeper runs
+        cls.instances_ids_before = []
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0001.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0002.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0003.dcm"))
+        cls.instances_ids_before.extend(cls.o.upload_file(here / "../../Database/Knee/T1/IM-0001-0004.dcm"))
+
+        cls.kill_orthanc()
+        time.sleep(3)
+
+        config = { 
+            db_config_key : db_config_content,
+            "AuthenticationEnabled": False,
+            "OverwriteInstances": True,
+            "MaximumStorageCacheSize": 0,  # disable the cache to force reading from disk every time
+            "AdvancedStorage": {
+                "Enable": True,
+                "MultipleStorages": {
+                    "Storages" : {
+                        "a" : cls.base_orthanc_storage_path + "storage-a",
+                        "b" : cls.base_orthanc_storage_path + "storage-b"
+                    },
+                    "CurrentWriteStorage": "b"
+                },
+                "OtherAttachmentsPrefix": "other-attachments",
+                "Indexer" : {
+                    "Enable": True,
+                    "Folders": [
+                        cls.base_orthanc_storage_path + "indexed-files-a/",
+                        cls.base_orthanc_storage_path + "indexed-files-b/"
+                    ],
+                    "Interval": 1,
+                    "TakeOwnership": False
+                },
+                "DelayedDeletion": {
+                    "Enable": True
+                }
+            },
+            "StableAge": 1
+        }
+
+        config_path = cls.generate_configuration(
+            config_name=f"{test_name}",
+            storage_name=cls._storage_name,
+            config=config,
+            plugins=Helpers.plugins
+        )
+
+        if Helpers.break_after_preparation:
+            print(f"++++ It is now time to start your Orthanc under tests with configuration file '{config_path}' +++++")
+            input("Press Enter to continue")
+        else:
+            cls.launch_orthanc_under_tests(
+                config_name=f"{test_name}",
+                storage_name=cls._storage_name,
+                config=config,
+                plugins=Helpers.plugins,
+                docker_network=network_name,
+                enable_verbose=True
+            )
+
+        cls.o = OrthancApiClient(cls.o._root_url)
+        cls.o.wait_started()
+
+    def check_file_exists(self, orthanc_path):
+        if Helpers.is_docker():
+            orthanc_path = orthanc_path.replace("/var/lib/orthanc/db", self.get_storage_path(self._storage_name))
+        return os.path.exists(orthanc_path)
+
+    def has_no_more_pending_deletion_files(self):
+        status = self.o.get_json("/plugins/advanced-storage/status")
+        return status['DelayedDeletionIsActive'] and status['FilesPendingDeletion'] == 0
+
+    def wait_until_no_more_pending_deletion_files(self):
+        time.sleep(1)
+        OrthancHelpers.wait_until(lambda: self.has_no_more_pending_deletion_files(), timeout=10, polling_interval=1)
+
+    def test_move_storage(self):
+        # upload a single file
+        uploaded_instances_ids = self.o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")
+
+        # check its path
+        info_before_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_before_move['Path'])
+        self.assertEqual("b", info_before_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_before_move['Path']))
+        self.assertTrue(self.check_file_exists(info_before_move['Path']))
+
+        # move it to storage A
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'a'
+                    })
+        
+        # check its path after the move
+        info_after_move = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-a', info_after_move['Path'])
+        self.assertEqual("a", info_after_move['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_before_move['Path']))
+        self.assertFalse(self.check_file_exists(info_before_move['Path']))
+
+        # move it back to storage B
+        self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                    json={
+                        'Resources': [uploaded_instances_ids[0]],
+                        'TargetStorageId' : 'b'
+                    })
+        
+        # check its path after the move
+        info_after_move2 = self.o.get_json(endpoint=f"/instances/{uploaded_instances_ids[0]}/attachments/dicom/info")
+        self.assertIn('storage-b', info_after_move2['Path'])
+        self.assertEqual("b", info_after_move2['StorageId'])
+        # self.assertTrue(os.path.exists(info_after_move2['Path']))
+        self.assertTrue(self.check_file_exists(info_after_move2['Path']))
+
+        self.wait_until_no_more_pending_deletion_files()
+        # self.assertFalse(os.path.exists(info_after_move['Path']))
+        self.assertFalse(self.check_file_exists(info_after_move['Path']))
+
+    def test_adopt_abandon(self):
+
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0001.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Beaufix/IM-0001-0002.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0003.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0004.dcm", self.base_test_storage_path + "adopt-files/")
+        shutil.copy(here / "../../Database/Brainix/Epi/IM-0001-0005.dcm", self.base_test_storage_path + "adopt-files/")
+
+        # adopt a file
+        r1 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0001.dcm",
+                            "TakeOwnership": False
+                        }).json()
+        r2 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm",
+                            "TakeOwnership": False
+                        }).json()
+        r3 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0003.dcm",
+                            "TakeOwnership": True
+                        }).json()
+        r4 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0004.dcm",
+                            "TakeOwnership": True
+                        }).json()
+        r5 = self.o.post(endpoint="/plugins/advanced-storage/adopt-instance",
+                        json={
+                            "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0005.dcm",
+                            "TakeOwnership": True
+                        }).json()
+
+        # pprint.pprint(r1)
+
+        # check its path
+        info1 = self.o.get_json(endpoint=f"/instances/{r1['InstanceId']}/attachments/dicom/info")
+        self.assertNotIn('storage-b', info1['Path'])
+        self.assertNotIn('StorageId', info1)
+        self.assertFalse(info1['IsOwnedByOrthanc'])
+        self.assertFalse(info1['IsIndexed'])
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+        self.assertEqual(r1['AttachmentUuid'], info1['Uuid'])
+
+        info2 = self.o.get_json(endpoint=f"/instances/{r2['InstanceId']}/attachments/dicom/info")
+
+        # try to move an adopted file that does not belong to Orthanc -> it should fail
+        with self.assertRaises(orthanc_exceptions.HttpError) as ctx:
+            self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                        json={
+                            'Resources': [r1['InstanceId']],
+                            'TargetStorageId' : 'a'
+                        })
+
+        # delete an adopted file that does not belong to Orthanc -> the file shall not be removed
+        self.o.instances.delete(orthanc_id=r1['InstanceId'])
+        self.assertNotIn(r1['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info1['Path']))
+        self.assertTrue(self.check_file_exists(info1['Path']))
+
+        # abandon an adopted file that does not belong to Orthanc -> the file shall not be removed (it shall be equivalent to a delete)
+        self.o.post(endpoint="/plugins/advanced-storage/abandon-instance",
+                    json={
+                        "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0002.dcm"
+                    })
+        self.assertNotIn(r2['InstanceId'], self.o.instances.get_all_ids())
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(self.check_file_exists(info2['Path']))
+
+        info4 = self.o.get_json(endpoint=f"/instances/{r4['InstanceId']}/attachments/dicom/info")
+        self.assertTrue(info4['IsOwnedByOrthanc'])
+        self.assertFalse(info4['IsIndexed'])  # the file is not considered as indexed since it is owned by Orthanc
+        # abandon an adopted file that belongs to Orthanc -> the file shall be deleted
+        self.o.post(endpoint="/plugins/advanced-storage/abandon-instance",
+                    json={
+                        "Path": self.base_orthanc_storage_path + "adopt-files/IM-0001-0004.dcm"
+                    })
+        self.wait_until_no_more_pending_deletion_files()
+        self.assertFalse(self.check_file_exists(info4['Path']))
+
+        # delete an adopted file that belongs to Orthanc -> the file shall be removed
+        info5 = self.o.get_json(endpoint=f"/instances/{r5['InstanceId']}/attachments/dicom/info")
+        self.o.instances.delete(orthanc_id=r5['InstanceId'])
+        self.assertNotIn(r5['InstanceId'], self.o.instances.get_all_ids())
+        self.wait_until_no_more_pending_deletion_files()
+        self.assertFalse(self.check_file_exists(info5['Path']))
+
+        # try to move an adopted file that belongs to Orthanc -> it should not work.
+        with self.assertRaises(orthanc_exceptions.HttpError) as ctx:
+            self.o.post(endpoint="/plugins/advanced-storage/move-storage",
+                        json={
+                            'Resources': [r3['InstanceId']],
+                            'TargetStorageId' : 'a'
+                        })
+
+        # try to reconstruct an adopted file that belongs to Orthanc -> it shall move the file to the Orthanc Storage
+        self.o.post(endpoint=f"/instances/{r3['InstanceId']}/reconstruct",
+                    json={
+                        'ReconstructFiles': True
+                    })
+
+        info3 = self.o.get_json(endpoint=f"/instances/{r3['InstanceId']}/attachments/dicom/info")
+        self.assertIn('storage-b', info3['Path'])
+        self.assertEqual('b', info3['StorageId'])
+        self.assertTrue(self.check_file_exists(info3['Path']))
+
+        # after the reconstruction, the file is not considered as adopted anymore
+        self.assertTrue(info3['IsOwnedByOrthanc'])
+        self.assertFalse(info3['IsIndexed'])
+
+
+    def test_indexer(self):
+        # add 2 files to the 2 indexed folders
+        shutil.copy(here / "../../Database/Comunix/Ct/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-a/")
+        shutil.copy(here / "../../Database/Comunix/Pet/IM-0001-0001.dcm", self.base_test_storage_path + "indexed-files-b/")
+
+        # wait for the files to be indexed
+        time.sleep(5)
+
+        # check that the study has been indexed
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(2, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+        
+        instances_ids = self.o.studies.get_instances_ids(studies[0].orthanc_id)
+        info1 = self.o.get_json(endpoint=f"/instances/{instances_ids[0]}/attachments/dicom/info")
+        info2 = self.o.get_json(endpoint=f"/instances/{instances_ids[1]}/attachments/dicom/info")
+
+        self.assertFalse(info1['IsOwnedByOrthanc'])
+        self.assertFalse(info2['IsOwnedByOrthanc'])
+
+        # make sure we can read the files from disk (bug in 0.2.0)
+        self.o.get_binary(endpoint=f"/instances/{instances_ids[0]}/file")
+        self.o.get_binary(endpoint=f"/instances/{instances_ids[1]}/file")
+
+        # remove one of the files from the indexed folders -> it shall disappear from Orthanc
+        os.remove(self.base_test_storage_path + "indexed-files-b/IM-0001-0001.dcm")
+
+        time.sleep(5)
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(1, len(self.o.studies.get_series_ids(studies[0].orthanc_id)))
+
+        # delete the other file from the Orthanc API -> the file shall not be deleted since it is not owned by Orthanc
+        # and it shall not be indexed anymore ...
+
+        self.o.studies.delete(orthanc_id=studies[0].orthanc_id)
+        time.sleep(5)
+        
+        studies = self.o.studies.find(query={"PatientName": "COMUNIX"})
+        self.assertEqual(0, len(studies))
+        # self.assertTrue(os.path.exists(info2['Path']))
+        self.assertTrue(os.path.exists(self.base_test_storage_path + "indexed-files-a/IM-0001-0001.dcm"))
+
--- a/NewTests/Authorization/auth_service.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/Authorization/auth_service.py	Tue Nov 04 15:52:51 2025 +0100
@@ -78,3 +78,21 @@
 
     logging.info("validate token: " + response.json())
     return response
+
+@app.post("/tokens/decode")
+def decode_token(request: TokenDecoderRequest):
+
+    logging.info("decoding token: " + request.json())
+    response = TokenDecoderResponse(resources=[])
+
+    if request.token_value == "token-a-study" or request.token_value == "token-both-studies":
+        response.resources.append(OrthancResource(level=Levels.STUDY,
+                                                    orthanc_id="b9c08539-26f93bde-c81ab0d7-bffaf2cb-a4d0bdd0",
+                                                    dicom_uid="1.2.840.113619.2.176.2025.1499492.7391.1171285944.390"))
+    if request.token_value == "token-b-study" or request.token_value == "token-both-studies":
+        response.resources.append(OrthancResource(level=Levels.STUDY,
+                                                    orthanc_id="27f7126f-4f66fb14-03f4081b-f9341db2-53925988",
+                                                    dicom_uid="2.16.840.1.113669.632.20.1211.10000357775"))
+
+    logging.info("decoded token: " + response.json())
+    return response
--- a/NewTests/Authorization/models.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/Authorization/models.py	Tue Nov 04 15:52:51 2025 +0100
@@ -1,4 +1,4 @@
-from typing import Optional, List
+from typing import Optional, List, Dict
 from pydantic import BaseModel, Field
 from enum import Enum
 from datetime import datetime
@@ -31,6 +31,7 @@
     MEDDREAM_VIEWER_PUBLICATION = 'meddream-viewer-publication'  # a link to open the MedDream viewer valid for a long period
     STONE_VIEWER_PUBLICATION = 'stone-viewer-publication'  # a link to open the Stone viewer valid for a long period
     OHIF_VIEWER_PUBLICATION = 'ohif-viewer-publication'  # a link to open the OHIF viewer valid for a long period
+    VOLVIEW_VIEWER_PUBLICATION = 'volview-viewer-publication'  # a link to open the VolView viewer valid for a long period
 
     MEDDREAM_INSTANT_LINK = 'meddream-instant-link'  # a direct link to MedDream viewer that is valid only a few minutes to open the viewer directly
 
@@ -80,7 +81,6 @@
     level: Optional[Levels]
     method: Methods
     uri: Optional[str] = None
-#     labels: Optional[List[str]]
 
 
 class TokenValidationResponse(BaseModel):
@@ -97,7 +97,7 @@
     token_type: Optional[TokenType] = Field(alias="token-type", default=None)
     error_code: Optional[DecoderErrorCodes] = Field(alias="error-code", default=None)
     redirect_url: Optional[str] = Field(alias="redirect-url", default=None)
-
+    resources: List[OrthancResource]
 
 class UserProfileRequest(BaseModel):
     token_key: Optional[str] = Field(alias="token-key", default=None)
@@ -118,16 +118,30 @@
     SETTINGS = 'settings'
     API_VIEW = 'api-view'
     EDIT_LABELS = 'edit-labels'
+    ADMIN_PERMISSIONS = 'admin-permissions'
 
     SHARE = 'share'
 
 
-class UserProfileResponse(BaseModel):
-    name: str
+class RolePermissions(BaseModel):
     authorized_labels: List[str] = Field(alias="authorized-labels", default_factory=list)
     permissions: List[UserPermissions] = Field(default_factory=list)
+
+    class Config:
+        use_enum_values = True
+        populate_by_name = True  # allow creating object from dict (used when deserializing the permission file)
+
+
+class UserProfileResponse(RolePermissions):
+    name: str
+    # authorized_labels: List[str] = Field(alias="authorized-labels", default_factory=list)
+    # permissions: List[UserPermissions] = Field(default_factory=list)
     validity: int
 
     class Config:
         use_enum_values = True
-        populate_by_name = True
\ No newline at end of file
+        populate_by_name = True
+
+class RolesConfigurationModel(BaseModel):
+    roles: Dict[str, RolePermissions]                                                # role/permissions mapping
+    available_labels: List[str] = Field(alias="available-labels", default_factory=list)  # if empty, everyone can create additional labels, if not, they can only add/remove the listed labels
--- a/NewTests/Authorization/test_authorization.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/Authorization/test_authorization.py	Tue Nov 04 15:52:51 2025 +0100
@@ -99,6 +99,7 @@
         cls.label_a_instance_id = o.upload_file(here / "../../Database/Knix/Loc/IM-0001-0001.dcm")[0]
         cls.label_a_study_id = o.instances.get_parent_study_id(cls.label_a_instance_id)
         cls.label_a_series_id = o.instances.get_parent_series_id(cls.label_a_instance_id)
+        cls.label_a_patient_dicom_id = o.studies.get_tags(cls.label_a_study_id)["PatientID"]
         cls.label_a_study_dicom_id = o.studies.get_tags(cls.label_a_study_id)["StudyInstanceUID"]
         cls.label_a_series_dicom_id = o.series.get_tags(cls.label_a_series_id)["SeriesInstanceUID"]
         cls.label_a_instance_dicom_id = o.instances.get_tags(cls.label_a_instance_id)["SOPInstanceUID"]
@@ -107,6 +108,7 @@
         cls.label_b_instance_id = o.upload_file(here / "../../Database/Brainix/Epi/IM-0001-0001.dcm")[0]
         cls.label_b_study_id = o.instances.get_parent_study_id(cls.label_b_instance_id)
         cls.label_b_series_id = o.instances.get_parent_series_id(cls.label_b_instance_id)
+        cls.label_b_patient_dicom_id = o.studies.get_tags(cls.label_b_study_id)["PatientID"]
         cls.label_b_study_dicom_id = o.studies.get_tags(cls.label_b_study_id)["StudyInstanceUID"]
         cls.label_b_series_dicom_id = o.series.get_tags(cls.label_b_series_id)["SeriesInstanceUID"]
         cls.label_b_instance_dicom_id = o.instances.get_tags(cls.label_b_instance_id)["SOPInstanceUID"]
@@ -118,6 +120,7 @@
         cls.no_label_instance_id = o.upload_file(here / "../../Database/Comunix/Pet/IM-0001-0001.dcm")[0]
         cls.no_label_study_id = o.instances.get_parent_study_id(cls.no_label_instance_id)
         cls.no_label_series_id = o.instances.get_parent_series_id(cls.no_label_instance_id)
+        cls.no_label_patient_dicom_id = o.studies.get_tags(cls.no_label_study_id)["PatientID"]
         cls.no_label_study_dicom_id = o.studies.get_tags(cls.no_label_study_id)["StudyInstanceUID"]
         cls.no_label_series_dicom_id = o.series.get_tags(cls.no_label_series_id)["SeriesInstanceUID"]
         cls.no_label_instance_dicom_id = o.instances.get_tags(cls.no_label_instance_id)["SOPInstanceUID"]
@@ -315,6 +318,15 @@
             self.assertEqual(1, len(r["Labels"]))
             self.assertEqual("label_a", r["Labels"][0])
 
+        if o_admin.is_plugin_version_at_least("authorization", 0, 9, 2):
+            i = o.get_json(f"dicom-web/studies?StudyInstanceUID={self.label_a_study_dicom_id}")
+            
+            # this one is forbidden because we specify the study (and the study is forbidden)
+            self.assert_is_forbidden(lambda: o.get_json(f"dicom-web/studies?StudyInstanceUID={self.label_b_study_dicom_id}"))
+            
+            # this one is empty because no studies are specified
+            self.assertEqual(0, len(o.get_json(f"dicom-web/studies?PatientID={self.label_b_patient_dicom_id}")))
+
 
     def test_uploader_a(self):
         o_admin = OrthancApiClient(self.o._root_url, headers={"user-token-key": "token-admin"})
@@ -359,9 +371,11 @@
         # with a resource token, we can access only the given resource, not generic resources or resources from other studies
 
         # generic resources are forbidden
+        # note: even tools/find is still forbidden in 0.9.3 (but not /dicom-web/studies -> see below)
         self.assert_is_forbidden(lambda: o.studies.find(query={"PatientName": "KNIX"},  # tools/find is forbidden with a resource token
                                                         labels=['label_b'],
                                                         labels_constraint='Any'))
+
         self.assert_is_forbidden(lambda: o.get_all_labels())
         self.assert_is_forbidden(lambda: o.studies.get_all_ids())
         self.assert_is_forbidden(lambda: o.patients.get_all_ids())
@@ -403,6 +417,12 @@
         o.get_json(f"dicom-web/series?0020000D={self.label_a_study_dicom_id}")
         o.get_json(f"dicom-web/instances?0020000D={self.label_a_study_dicom_id}")
 
+        if o.is_plugin_version_at_least("authorization", 0, 9, 3):
+            # equivalent to the prior studies request in OHIF
+            self.assertEqual(1, len(o.get_json(f"dicom-web/studies?PatientID={self.label_a_patient_dicom_id}")))
+            self.assertEqual(0, len(o.get_json(f"dicom-web/studies?PatientID={self.label_b_patient_dicom_id}")))
+
+
         if self.o.is_orthanc_version_at_least(1, 12, 2):
             o.get_binary(f"tools/create-archive?resources={self.label_a_study_id}")
             o.get_binary(f"tools/create-archive?resources={self.label_a_series_id}")
--- a/NewTests/CGet/get-scp.py	Thu Apr 10 16:33:10 2025 +0200
+++ /dev/null	Thu Jan 01 00:00:00 1970 +0000
@@ -1,104 +0,0 @@
-import os
-
-from pydicom import dcmread
-from pydicom.dataset import Dataset
-
-from pydicom.uid import ImplicitVRLittleEndian, ExplicitVRLittleEndian
-from pynetdicom import AE, StoragePresentationContexts, evt, AllStoragePresentationContexts
-from pynetdicom.sop_class import PatientRootQueryRetrieveInformationModelGet, StudyRootQueryRetrieveInformationModelGet, MRImageStorage, CTImageStorage
-
-import logging
-
-# Configure logging
-logging.basicConfig(level=logging.DEBUG)
-
-def transform_to_transfer_syntax(dataset, target_transfer_syntax):
-    # Create a new dataset with the new transfer syntax
-    new_dataset = Dataset()
-    new_dataset.file_meta = Dataset()
-    new_dataset.file_meta.TransferSyntaxUID = target_transfer_syntax
-    new_dataset.update(dataset)
-    return new_dataset
-
-# Implement the handler for evt.EVT_C_GET
-def handle_get(event):
-    """Handle a C-GET request event."""
-    ds = event.identifier
-    if 'QueryRetrieveLevel' not in ds:
-        # Failure
-        yield 0xC000, None
-        return
-
-    # Import stored SOP Instances
-    instances = []
-    matching = []
-    fdir = '/home/alain/o/orthanc-tests/Database/Brainix/Epi'
-    for fpath in os.listdir(fdir):
-        instances.append(dcmread(os.path.join(fdir, fpath)))
-
-    if ds.QueryRetrieveLevel == 'PATIENT':
-        if 'PatientID' in ds:
-            matching = [
-                inst for inst in instances if inst.PatientID == ds.PatientID
-            ]
-    elif ds.QueryRetrieveLevel == 'STUDY':
-        if 'StudyInstanceUID' in ds:
-            matching = [
-                inst for inst in instances if inst.StudyInstanceUID == ds.StudyInstanceUID
-            ]
-
-    print(f"GET-SCP: instances to send: {len(instances)}")
-    # Yield the total number of C-STORE sub-operations required
-    yield len(instances)
-
-    # Yield the matching instances
-    for instance in matching:
-        # Check if C-CANCEL has been received
-        if event.is_cancelled:
-            yield (0xFE00, None)
-            return
-
-        # Pending
-        accepted_transfer_syntax = event.assoc.accepted_contexts[0].transfer_syntax
-
-        if accepted_transfer_syntax != instance.file_meta.TransferSyntaxUID:
-            transformed_instance = transform_to_transfer_syntax(instance, accepted_transfer_syntax)
-            yield (0xFF00, transformed_instance)
-        else:
-            yield (0xFF00, instance)
-        
-
-handlers = [(evt.EVT_C_GET, handle_get)]
-
-# Create application entity
-ae = AE("PYNETDICOM")
-
-accepted_transfer_syntaxes = [
-    '1.2.840.10008.1.2',  # Implicit VR Little Endian
-    '1.2.840.10008.1.2.1',  # Explicit VR Little Endian
-    '1.2.840.10008.1.2.2',  # Explicit VR Big Endian
-    '1.2.840.10008.1.2.4.50',  # JPEG Baseline (Process 1)
-    '1.2.840.10008.1.2.4.70',  # JPEG Lossless, Non-Hierarchical (Process 14)
-]
-
-# # Add the supported presentation contexts (Storage SCU)
-# ae.supported_contexts = StoragePresentationContexts
-
-# # Accept the association requestor's proposed SCP role in the
-# #   SCP/SCU Role Selection Negotiation items
-# for cx in ae.supported_contexts:
-#     cx.scp_role = True
-#     cx.scu_role = False
-
-# # Add a supported presentation context (QR Get SCP)
-ae.add_supported_context(PatientRootQueryRetrieveInformationModelGet)
-ae.add_supported_context(StudyRootQueryRetrieveInformationModelGet)
-# ae.add_supported_context(MRImageStorage, accepted_transfer_syntaxes, scu_role=True, scp_role=True)
-# ae.add_supported_context(CTImageStorage, accepted_transfer_syntaxes, scu_role=True, scp_role=True)
-
-
-for context in AllStoragePresentationContexts:
-    ae.add_supported_context(context.abstract_syntax, ImplicitVRLittleEndian)
-
-# Start listening for incoming association requests
-ae.start_server(("0.0.0.0", 11112), evt_handlers=handlers)
\ No newline at end of file
--- a/NewTests/CGet/test_cget.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/CGet/test_cget.py	Tue Nov 04 15:52:51 2025 +0100
@@ -4,7 +4,7 @@
 import threading
 from helpers import OrthancTestCase, Helpers
 
-from orthanc_api_client import OrthancApiClient, ChangeType
+from orthanc_api_client import OrthancApiClient, ChangeType, HttpError
 from orthanc_api_client import helpers as OrthancHelpers
 
 import pathlib
@@ -67,3 +67,15 @@
 
         oa.modalities.get_study(from_modality='b', dicom_id='2.16.840.1.113669.632.20.1211.10000357775')
         self.assertEqual(len(instances_ids), len(oa.instances.get_all_ids()))       
+
+    def test_cget_not_found(self):
+
+        oa, ob = self.clean_start()
+
+        instances_ids = ob.upload_folder( here / "../../Database/Brainix")
+
+        if oa.is_orthanc_version_at_least(1, 12, 10):
+            with self.assertRaises(HttpError) as ex:
+                oa.modalities.get_study(from_modality='b', dicom_id='5.6.7')
+            self.assertEqual(0xc000, ex.exception.dimse_error_status)
+            self.assertEqual(0, len(oa.instances.get_all_ids()))       
--- a/NewTests/Concurrency/docker-compose-transfers-concurrency.yml	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/Concurrency/docker-compose-transfers-concurrency.yml	Tue Nov 04 15:52:51 2025 +0100
@@ -1,4 +1,3 @@
-version: "3"
 services:
 
   orthanc-pg-a:
--- a/NewTests/DelayedDeletion/test_delayed_deletion.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/DelayedDeletion/test_delayed_deletion.py	Tue Nov 04 15:52:51 2025 +0100
@@ -8,6 +8,7 @@
 
 import pathlib
 import glob
+import pprint
 here = pathlib.Path(__file__).parent.resolve()
 
 
@@ -23,7 +24,8 @@
                 "DelayedDeletion": {
                     "Enable": True,
                     "ThrottleDelayMs": 200
-                }
+                },
+                "DatabaseServerIdentifier": "delayed-test"
             }
 
         config_path = cls.generate_configuration(
@@ -98,11 +100,15 @@
 
     def test_resumes_pending_deletion(self):
 
+        # plugin_status = self.o.get_json("plugins/delayed-deletion/status")
+        # pprint.pprint(plugin_status)
+        
         completed = False
         while not completed:
             print('-------------- waiting for DelayedDeletion to finish processing')
             time.sleep(1)
             plugin_status = self.o.get_json("plugins/delayed-deletion/status")
+            # pprint.pprint(plugin_status)
             completed = plugin_status["FilesPendingDeletion"] == 0
 
         self.assertTrue(completed)
--- a/NewTests/PostgresUpgrades/docker-compose.yml	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/PostgresUpgrades/docker-compose.yml	Tue Nov 04 15:52:51 2025 +0100
@@ -17,7 +17,7 @@
 
   # Orthanc previous version
   orthanc-pg-15-previous-revision:
-    image: orthancteam/orthanc:25.1.1
+    image: orthancteam/orthanc:25.5.1
     container_name: orthanc-pg-15-previous-revision
     depends_on: [pg-15]
     restart: unless-stopped
@@ -30,7 +30,7 @@
 
   # Orthanc previous version to run the integration tests
   orthanc-pg-15-previous-revision-for-integ-tests:
-    image: orthancteam/orthanc:25.1.1
+    image: orthancteam/orthanc:25.5.1
     container_name: orthanc-pg-15-previous-revision-for-integ-tests
     depends_on: [pg-15]
     restart: unless-stopped
--- a/NewTests/PostgresUpgrades/downgrade.sh	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/PostgresUpgrades/downgrade.sh	Tue Nov 04 15:52:51 2025 +0100
@@ -4,9 +4,12 @@
 
 apt-get update && apt-get install -y wget mercurial
 hg clone https://orthanc.uclouvain.be/hg/orthanc-databases
-hg update -r default
-psql -U postgres -f /scripts/orthanc-databases/PostgreSQL/Plugins/SQL/Downgrades/Rev4ToRev3.sql
+pushd /scripts/orthanc-databases/
+# hg update -r default
+
+psql -U postgres -f /scripts/orthanc-databases/PostgreSQL/Plugins/SQL/Downgrades/Rev6ToRev5.sql
 
 # if you want to test a downgrade procedure, you may use this code ...
 # psql -U postgres -f downgrade.sql
 popd
+popd
--- a/NewTests/README	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/README	Tue Nov 04 15:52:51 2025 +0100
@@ -220,4 +220,30 @@
 with Docker:
 
 python3 NewTests/main.py --pattern=CGet.test_cget.TestCGet.* \
-                         --orthanc_under_tests_docker_image=orthancteam/orthanc-pre-release:2025.01.20
\ No newline at end of file
+                         --orthanc_under_tests_docker_image=orthancteam/orthanc-pre-release:2025.01.20
+
+AdvancedStorage:
+----------------
+
+Run the AdvancedStorage tests with your locally built version and break before execution to allow you to start your debugger.
+
+python3 NewTests/main.py --pattern=AdvancedStorage.test_advanced_storage.TestAdvancedStorage.* \
+                         --orthanc_under_tests_exe=/home/alain/o/build/orthanc/Orthanc \
+                         --orthanc_under_tests_http_port=8043 \
+                         --db=sqlite \
+                         --plugin=/home/alain/o/build/orthanc-dicomweb/libOrthancDicomWeb.so \
+                         --plugin=/home/alain/o/build/advanced-storage/libAdvancedStorage.so \
+                         --break_after_preparation
+
+python3 NewTests/main.py --pattern=AdvancedStorage.test_advanced_storage_default_naming_scheme.TestAdvancedStorageDefaultNamingScheme.* \
+                         --orthanc_under_tests_exe=/home/alain/o/build/orthanc/Orthanc \
+                         --orthanc_under_tests_http_port=8043 \
+                         --db=sqlite \
+                         --plugin=/home/alain/o/build/orthanc-dicomweb/libOrthancDicomWeb.so \
+                         --plugin=/home/alain/o/build/advanced-storage/libAdvancedStorage.so \
+                         --break_after_preparation
+with Docker:
+
+python3 NewTests/main.py --pattern=AdvancedStorage.test_advanced_storage.TestAdvancedStorage.* \
+                         --orthanc_under_tests_docker_image=orthancteam/orthanc:current \
+                         --orthanc_under_tests_http_port=8043
--- a/NewTests/helpers.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/helpers.py	Tue Nov 04 15:52:51 2025 +0100
@@ -8,6 +8,7 @@
 import glob
 import time
 from threading import Thread
+from enum import StrEnum
 
 
 import pathlib
@@ -45,6 +46,11 @@
         time.sleep(1)
 
 
+class DB(StrEnum):
+    SQLITE = 'sqlite'
+    PG = 'pg'
+    UNSPECIFIED = 'unspecified'
+
 
 class Helpers:
 
@@ -54,11 +60,13 @@
     orthanc_under_tests_exe: str = None
     orthanc_previous_version_exe: str = None
     orthanc_under_tests_docker_image: str = None
+    db: DB = DB.UNSPECIFIED
     skip_preparation: bool = False
     break_after_preparation: bool = False
     break_before_preparation: bool = False
     plugins: typing.List[str] = []
 
+
     @classmethod
     def get_orthanc_url(cls):
         return f"http://{cls.orthanc_under_tests_hostname}:{cls.orthanc_under_tests_http_port}"
@@ -167,22 +175,29 @@
         if Helpers.is_exe():
 
             # clear the directory but keep it !
-            for root, dirs, files in os.walk(storage_path):
-                for f in files:
-                    os.unlink(os.path.join(root, f))
-                for d in dirs:
-                    shutil.rmtree(os.path.join(root, d))
-                    shutil.rmtree(storage_path, ignore_errors=True)
+            shutil.rmtree(storage_path, ignore_errors=True)
+            pathlib.Path(storage_path).mkdir(parents=True, exist_ok=True)
+
+            # for root, dirs, files in os.walk(storage_path):
+            #     for f in files:
+            #         os.unlink(os.path.join(root, f))
+            #     for d in dirs:
+            #         shutil.rmtree(os.path.join(root, d))
+            #         shutil.rmtree(storage_path, ignore_errors=True)
         else:
+            # create the directory with user ownership before docker creates it 
+            pathlib.Path(storage_path).mkdir(parents=True, exist_ok=True)
+
+            # clear the directory (but you need to be root from the container !)
             cmd = [
                     "docker", "run", "--rm", 
                     "-v", f"{storage_path}:/var/lib/orthanc/db/",
                     "--name", "storage-cleanup",
                     "debian:12-slim",
-                    "rm", "-rf", "/var/lib/orthanc/db/*"
+                    "bash", "-c", "rm -rf /var/lib/orthanc/db/*"
                 ]
+            subprocess.run(cmd, check=True)
 
-            subprocess.run(cmd, check=True)
 
     @classmethod
     def is_storage_empty(cls, storage_name: str):
@@ -196,7 +211,7 @@
             subprocess.run(["docker", "network", "create", network])
 
     @classmethod
-    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = [], docker_network: str = None):
+    def launch_orthanc_to_prepare_db(cls, config_name: str = None, config: object = None, config_path: str = None, storage_name: str = None, plugins = [], docker_network: str = None, enable_verbose: bool = False):
         if config_name and storage_name and config:
             # generate the configuration file
             config_path = cls.generate_configuration(
@@ -220,7 +235,8 @@
                 storage_name=storage_name,
                 config_name=config_name,
                 config_path=config_path,
-                network=docker_network
+                network=docker_network,
+                enable_verbose=enable_verbose
             )
         else:
             raise RuntimeError("Invalid configuration, can not launch Orthanc")
--- a/NewTests/main.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/main.py	Tue Nov 04 15:52:51 2025 +0100
@@ -29,6 +29,7 @@
     parser.add_argument('--skip_preparation', action='store_true', help="if this is a multi stage tests with preparations, skip the preparation")
     parser.add_argument('--break_after_preparation', action='store_true', help="if this is a multi stage tests with preparations, pause after the preparation (such that you can start your own orthanc-under-tests in your debugger)")
     parser.add_argument('--break_before_preparation', action='store_true', help="if this is a multi stage tests with preparations, pause before the preparation (such that you can start your own orthanc-under-tests in your debugger)")
+    parser.add_argument('--db', type=str, default='unspecified', help="the DB engine to use")
     parser.add_argument('-p', '--plugin', dest='plugins', action='append', type=str, help='path to a plugin to add to configuration')
 
     args = parser.parse_args()
@@ -40,6 +41,7 @@
     Helpers.orthanc_under_tests_http_port = args.orthanc_under_tests_http_port
     Helpers.orthanc_under_tests_dicom_port = args.orthanc_under_tests_dicom_port
     Helpers.plugins = args.plugins
+    Helpers.db = args.db
 
     Helpers.orthanc_under_tests_exe = args.orthanc_under_tests_exe
     Helpers.orthanc_under_tests_docker_image = args.orthanc_under_tests_docker_image
--- a/NewTests/requirements.txt	Thu Apr 10 16:33:10 2025 +0200
+++ b/NewTests/requirements.txt	Tue Nov 04 15:52:51 2025 +0100
@@ -1,3 +1,3 @@
-orthanc-api-client>=0.18.5
-orthanc-tools>=0.13.0
+orthanc-api-client>=0.20.1
+orthanc-tools>=0.16.5
 uvicorn
\ No newline at end of file
--- a/Plugins/DicomWeb/DicomWeb.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/Plugins/DicomWeb/DicomWeb.py	Tue Nov 04 15:52:51 2025 +0100
@@ -41,7 +41,7 @@
         body += bytearray('\r\n', 'ascii')
 
 
-def SendStowRaw(orthanc, uri, dicom):
+def SendStowRaw(orthanc, uri, dicom, partsContentType='application/dicom'):
     # We do not use Python's "email" package, as it uses LF (\n) for line
     # endings instead of CRLF (\r\n) for binary messages, as required by
     # RFC 1341
@@ -54,9 +54,9 @@
 
     if isinstance(dicom, list):
         for i in range(dicom):
-            _AttachPart(body, dicom[i], 'application/dicom', boundary)
+            _AttachPart(body, dicom[i], partsContentType, boundary)
     else:
-        _AttachPart(body, dicom, 'application/dicom', boundary)
+        _AttachPart(body, dicom, partsContentType, boundary)
 
     # Closing boundary
     body += bytearray('--%s--' % boundary, 'ascii')
@@ -72,8 +72,8 @@
     return (response.status, DecodeJson(content))
 
 
-def SendStow(orthanc, uri, dicom):
-    (status, content) = SendStowRaw(orthanc, uri, dicom)
+def SendStow(orthanc, uri, dicom, partsContentType='application/dicom'):
+    (status, content) = SendStowRaw(orthanc, uri, dicom, partsContentType)
     if not (status in [ 200 ]):
         raise Exception('Bad status: %d' % status)
     else:
--- a/Plugins/DicomWeb/Run.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/Plugins/DicomWeb/Run.py	Tue Nov 04 15:52:51 2025 +0100
@@ -189,7 +189,10 @@
         a = SendStow(ORTHANC, args.dicomweb + '/studies', GetDatabasePath('Phenix/IM-0001-0001.dcm'))
         self.assertEqual(1, len(DoGet(ORTHANC, '/instances')))
 
-        self.assertEqual(4, len(a))
+        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 19, 0):
+            self.assertEqual(3, len(a))  # DICOM_TAG_FAILED_SOP_SEQUENCE has been removed in 1.19
+        else:
+            self.assertEqual(4, len(a))
 
         # Specific character set
         self.assertTrue('00080005' in a)
@@ -198,8 +201,11 @@
         self.assertTrue(a['00081190']['Value'][0].endswith('studies/2.16.840.1.113669.632.20.1211.10000098591'))
         self.assertEqual('UR', a['00081190']['vr'])
         
-        self.assertFalse('Value' in a['00081198'])  # No error => empty sequence
-        self.assertEqual('SQ', a['00081198']['vr'])
+        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 19, 0):
+            self.assertNotIn('00081198', a)  # No errors => the DICOM_TAG_FAILED_SOP_SEQUENCE shall not be present
+        else:
+            self.assertFalse('Value' in a['00081198'])  # No error => empty sequence
+            self.assertEqual('SQ', a['00081198']['vr'])
 
         self.assertEqual(1, len(a['00081199']['Value']))  # 1 success
         self.assertEqual('SQ', a['00081199']['vr'])
@@ -236,6 +242,16 @@
         self.assertEqual(1, len(parts))
         self.assertEqual(os.path.getsize(GetDatabasePath('Phenix/IM-0001-0001.dcm')), int(parts[0]))
 
+    def test_stow_like_dcm4chee(self):
+        # https://discourse.orthanc-server.org/t/orthanc-dicomweb-stowrs-server-request-response-compatibility/5763
+
+        self.assertEqual(0, len(DoGet(ORTHANC, '/instances')))
+        a = SendStow(ORTHANC, args.dicomweb + '/studies', GetDatabasePath('Phenix/IM-0001-0001.dcm'), 'application/dicom;transfer-syntax=1.2.840.10008.1.2.1')
+        self.assertEqual(1, len(DoGet(ORTHANC, '/instances')))
+
+        self.assertNotIn('00081198', a)  # No errors => the DICOM_TAG_FAILED_SOP_SEQUENCE shall not be present
+
+
         
     def test_server_get(self):
         try:
@@ -296,56 +312,85 @@
 
 
     def test_server_stow(self):
-        UploadInstance(ORTHANC, 'Knee/T1/IM-0001-0001.dcm')
+        # UploadInstance(ORTHANC, 'Knee/T1/IM-0001-0001.dcm')
+
+        # self.assertRaises(Exception, lambda: 
+        #                   DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+        #                          { 'Resources' : [ 'nope' ],
+        #                            'Synchronous' : True }))  # inexisting resource
 
-        self.assertRaises(Exception, lambda: 
-                          DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
-                                 { 'Resources' : [ 'nope' ],
-                                   'Synchronous' : True }))  # inexisting resource
+        # if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 18, 0):
+        #     l = 4   # "Server" has been added
+        # else:
+        #     l = 3   # For >= 1.10.1
+
+        # # study
+        # r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+        #                                { 'Resources' : [ '0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918' ],
+        #                                  'Synchronous' : True })
 
-        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 18, 0):
-            l = 4   # "Server" has been added
-        else:
-            l = 3   # For >= 1.10.1
+        # self.assertEqual(l, len(r))
+        # self.assertEqual("0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918", r['Resources']['Studies'][0])
+        # if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 18, 0):
+        #     self.assertEqual("sample", r['Server'])
 
-        # study
-        r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
-                                       { 'Resources' : [ '0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918' ],
-                                         'Synchronous' : True })
+        # # series
+        # r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+        #                                { 'Resources' : [ '6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285' ],
+        #                                  'Synchronous' : True })
+        # self.assertEqual(l, len(r))
+        # self.assertEqual("6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285", r['Resources']['Series'][0])
 
-        self.assertEqual(l, len(r))
-        self.assertEqual("0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918", r['Resources']['Studies'][0])
-        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 18, 0):
-            self.assertEqual("sample", r['Server'])
+        # # instances
+        # r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+        #                                { 'Resources' : [ 'c8df6478-d7794217-0f11c293-a41237c9-31d98357' ],
+        #                                  'Synchronous' : True })
+        # self.assertEqual(l, len(r))
+        # self.assertEqual("c8df6478-d7794217-0f11c293-a41237c9-31d98357", r['Resources']['Instances'][0])
 
-        # series
-        r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
-                                       { 'Resources' : [ '6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285' ],
-                                         'Synchronous' : True })
-        self.assertEqual(l, len(r))
-        self.assertEqual("6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285", r['Resources']['Series'][0])
+        # # altogether
+        # r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+        #                                { 'Resources' : [ 
+        #                                    'ca29faea-b6a0e17f-067743a1-8b778011-a48b2a17',
+        #                                    '0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918',
+        #                                    '6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285',
+        #                                    'c8df6478-d7794217-0f11c293-a41237c9-31d98357' ],
+        #                                  'Synchronous' : True })
+        # # pprint.pprint(r)
+        # self.assertEqual(l, len(r))
+        # self.assertEqual("ca29faea-b6a0e17f-067743a1-8b778011-a48b2a17", r['Resources']['Patients'][0])
+        # self.assertEqual("0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918", r['Resources']['Studies'][0])
+        # self.assertEqual("6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285", r['Resources']['Series'][0])
+        # self.assertEqual("c8df6478-d7794217-0f11c293-a41237c9-31d98357", r['Resources']['Instances'][0])
 
-        # instances
-        r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
-                                       { 'Resources' : [ 'c8df6478-d7794217-0f11c293-a41237c9-31d98357' ],
-                                         'Synchronous' : True })
-        self.assertEqual(l, len(r))
-        self.assertEqual("c8df6478-d7794217-0f11c293-a41237c9-31d98357", r['Resources']['Instances'][0])
+
+        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 20, 0):
+            a = UploadInstance(ORTHANC, 'Brainix/Epi/IM-0001-0001.dcm')
+            b = UploadInstance(ORTHANC, 'Brainix/Epi/IM-0001-0002.dcm')
 
-        # altogether
-        r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
-                                       { 'Resources' : [ 
-                                           'ca29faea-b6a0e17f-067743a1-8b778011-a48b2a17',
-                                           '0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918',
-                                           '6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285',
-                                           'c8df6478-d7794217-0f11c293-a41237c9-31d98357' ],
-                                         'Synchronous' : True })
-        # pprint.pprint(r)
-        self.assertEqual(l, len(r))
-        self.assertEqual("ca29faea-b6a0e17f-067743a1-8b778011-a48b2a17", r['Resources']['Patients'][0])
-        self.assertEqual("0a9b3153-2512774b-2d9580de-1fc3dcf6-3bd83918", r['Resources']['Studies'][0])
-        self.assertEqual("6de73705-c4e65c1b-9d9ea1b5-cabcd8e7-f15e4285", r['Resources']['Series'][0])
-        self.assertEqual("c8df6478-d7794217-0f11c293-a41237c9-31d98357", r['Resources']['Instances'][0])
+            # study
+            r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+                                        { 'Resources' : [ a['ParentStudy'] ],
+                                          'Synchronous' : True })
+
+            self.assertEqual(1, len(r['Resources']['Studies']))
+            self.assertNotIn('Series', r['Resources'])
+            self.assertEqual(2, len(r['Resources']['Instances']))
+            self.assertEqual(a['ParentStudy'], r['Resources']['Studies'][0])
+            self.assertIn(a['ID'], r['Resources']['Instances'])
+            self.assertIn(b['ID'], r['Resources']['Instances'])
+
+            # series
+            r = DoPost(ORTHANC, '/dicom-web/servers/sample/stow',
+                                        { 'Resources' : [ a['ParentSeries'] ],
+                                          'Synchronous' : True })
+
+            self.assertEqual(1, len(r['Resources']['Series']))
+            self.assertNotIn('Studies', r['Resources'])
+            self.assertEqual(2, len(r['Resources']['Instances']))
+            self.assertEqual(a['ParentSeries'], r['Resources']['Series'][0])
+            self.assertIn(a['ID'], r['Resources']['Instances'])
+            self.assertIn(b['ID'], r['Resources']['Instances'])
 
 
 
@@ -708,19 +753,24 @@
 
         
     def test_stow_errors(self):
-        def CheckSequences(a):
-            self.assertEqual(3, len(a))
+        def CheckSequences(a, expectFailedSopSequence):
+            if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 19, 0) and not expectFailedSopSequence:
+                self.assertEqual(2, len(a))
+                self.assertNotIn('00081198', a)
+            else:
+                self.assertEqual(3, len(a))
+                self.assertTrue('00081198' in a)
+                self.assertEqual('SQ', a['00081198']['vr'])
+
             self.assertTrue('00080005' in a)
-            self.assertTrue('00081198' in a)
             self.assertTrue('00081199' in a)
             self.assertEqual('CS', a['00080005']['vr'])
-            self.assertEqual('SQ', a['00081198']['vr'])
             self.assertEqual('SQ', a['00081199']['vr'])
         
         # Pushing an instance to a study that is not its parent
         (status, a) = SendStowRaw(ORTHANC, args.dicomweb + '/studies/nope', GetDatabasePath('Phenix/IM-0001-0001.dcm'))
         self.assertEqual(409, status)
-        CheckSequences(a)
+        CheckSequences(a, True)
 
         self.assertFalse('Value' in a['00081199'])  # No success instance
         
@@ -735,23 +785,21 @@
         # Pushing an instance with missing tags
         (status, a) = SendStowRaw(ORTHANC, args.dicomweb + '/studies', GetDatabasePath('Issue111.dcm'))
         self.assertEqual(400, status)
-        CheckSequences(a)
+        CheckSequences(a, False) # No failed instance, as tags are missing
 
-        self.assertFalse('Value' in a['00081198'])  # No failed instance, as tags are missing
         self.assertFalse('Value' in a['00081199'])  # No success instance
 
         # Pushing a file that is not in the DICOM format
         (status, a) = SendStowRaw(ORTHANC, args.dicomweb + '/studies', GetDatabasePath('Issue111.dump'))
         self.assertEqual(400, status)
-        CheckSequences(a)
+        CheckSequences(a, False) # No failed instance, as non-DICOM
 
-        self.assertFalse('Value' in a['00081198'])  # No failed instance, as non-DICOM
         self.assertFalse('Value' in a['00081199'])  # No success instance
 
         # Pushing a DICOM instance with only SOP class and instance UID
         (status, a) = SendStowRaw(ORTHANC, args.dicomweb + '/studies', GetDatabasePath('Issue196.dcm'))
         self.assertEqual(400, status)
-        CheckSequences(a)
+        CheckSequences(a, True)
 
         self.assertFalse('Value' in a['00081199'])  # No success instance
 
@@ -1196,6 +1244,39 @@
         self.assertEqual(len(a), len(c))
         self.assertEqual(a, c)
 
+        if IsPluginVersionAtLeast(ORTHANC, "dicom-web", 1, 20, 0):
+            # test with 2 instances: https://discourse.orthanc-server.org/t/thumbnail-orthanc-stone-viewer-issue/5827/3
+            i = UploadInstance(ORTHANC, 'Brainix/Epi/IM-0001-0001.dcm') ['ID']
+            UploadInstance(ORTHANC, 'Brainix/Epi/IM-0001-0002.dcm') ['ID']
+
+            study = DoGet(ORTHANC, '/instances/%s/tags?simplify' % i) ['StudyInstanceUID']
+            series = DoGet(ORTHANC, '/instances/%s/tags?simplify' % i) ['SeriesInstanceUID']
+            instance = DoGet(ORTHANC, '/instances/%s/tags?simplify' % i) ['SOPInstanceUID']
+
+            a = DoPost(ORTHANC, '/dicom-web/servers/sample/get', {
+                'Uri' : '/studies/%s/series/%s/instances/%s/rendered' % (study, series, instance)
+            })
+            
+            im = UncompressImage(a)
+            self.assertEqual("L", im.mode)
+            self.assertEqual(256, im.size[0])
+            self.assertEqual(256, im.size[1])
+
+            b = DoPost(ORTHANC, '/dicom-web/servers/sample/get', {
+                'Uri' : '/studies/%s/series/%s/rendered' % (study, series)
+            })
+            
+            self.assertEqual(len(a), len(b))
+            self.assertEqual(a, b)
+
+            c = DoPost(ORTHANC, '/dicom-web/servers/sample/get', {
+                'Uri' : '/studies/%s/rendered' % study
+            })
+            
+            self.assertEqual(len(a), len(c))
+            self.assertEqual(a, c)
+
+
 
     def test_multiple_mime_accept_wado_rs(self):
         # "Multiple MIME type Accept Headers for Wado-RS"
--- a/README	Thu Apr 10 16:33:10 2025 +0200
+++ b/README	Tue Nov 04 15:52:51 2025 +0100
@@ -125,12 +125,12 @@
 
 # python ./Tests/Run.py
 
-To run a single test with by fixing the Orthanc 0.8.6 executable path:
-# python2 ./Tests/Run.py --force --orthanc /home/alain/Releases/Orthanc-0.8.6/Build/Orthanc Orthanc.test_peer_store_straight
+To run a single test with by fixing the Orthanc 1.12.5 executable path:
+# python2.7 ./Tests/Run.py --force --orthanc /home/alain/Releases/Orthanc-1.12.5 Orthanc.test_peer_store_straight
 
-To run a plugin test (no need for Orthanc 0.8.6)
-# python2 ./Plugins/DicomWeb/Run.py --force
-# python2 ./Plugins/DicomWeb/Run.py --force Orthanc.test_forwarded_headers
+To run a plugin test (no need for another Orthanc)
+# python ./Plugins/DicomWeb/Run.py --force
+# python ./Plugins/DicomWeb/Run.py --force Orthanc.test_forwarded_headers
 
 Use the flag "--help" to get the full list of arguments. These
 arguments will notably allow you to specify the network parameters
--- a/Tests/Tests.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/Tests/Tests.py	Tue Nov 04 15:52:51 2025 +0100
@@ -538,6 +538,32 @@
         self.assertEqual(0, len(DoGet(_REMOTE, '/patients')))
 
 
+    def test_delete_cascade_with_multiple_instances(self):
+        # make sure deleting the last instance of a study deletes the series, study and patient
+
+        self.assertEqual(0, len(DoGet(_REMOTE, '/instances')))  # make sure orthanc is empty when starting the test
+        a = UploadInstance(_REMOTE, 'Knix/Loc/IM-0001-0001.dcm')
+        b = UploadInstance(_REMOTE, 'Knix/Loc/IM-0001-0002.dcm')
+
+        self.assertEqual(2, len(DoGet(_REMOTE, '/instances')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/series')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/studies')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/patients')))
+
+        DoDelete(_REMOTE, '/instances/%s' % b['ID'])        
+
+        self.assertEqual(1, len(DoGet(_REMOTE, '/instances')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/series')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/studies')))
+        self.assertEqual(1, len(DoGet(_REMOTE, '/patients')))
+
+        DoDelete(_REMOTE, '/instances/%s' % a['ID'])        
+
+        self.assertEqual(0, len(DoGet(_REMOTE, '/instances')))
+        self.assertEqual(0, len(DoGet(_REMOTE, '/series')))
+        self.assertEqual(0, len(DoGet(_REMOTE, '/studies')))
+        self.assertEqual(0, len(DoGet(_REMOTE, '/patients')))
+
     def test_multiframe(self):
         i = UploadInstance(_REMOTE, 'Multiframe.dcm')['ID']
         self.assertEqual(76, len(DoGet(_REMOTE, '/instances/%s/frames' % i)))
@@ -881,9 +907,19 @@
         self.assertEqual(0, DoGet(_REMOTE, '/patients/%s/protected' % a))
         DoPut(_REMOTE, '/patients/%s/protected' % a, '1', 'text/plain')
         self.assertEqual(1, DoGet(_REMOTE, '/patients/%s/protected' % a))
+
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            p = DoGet(_REMOTE, '/patients/%s' % a)
+            self.assertIn('IsProtected', p)
+            self.assertTrue(p['IsProtected'])
+
         DoPut(_REMOTE, '/patients/%s/protected' % a, '0', 'text/plain')
         self.assertEqual(0, DoGet(_REMOTE, '/patients/%s/protected' % a))
 
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            p = DoGet(_REMOTE, '/patients/%s' % a)
+            self.assertIn('IsProtected', p)
+            self.assertFalse(p['IsProtected'])
 
     def test_raw_tags(self):
         i = UploadInstance(_REMOTE, 'PrivateTags.dcm')['ID']
@@ -991,6 +1027,78 @@
                 self.assertNotEqual('Jodogne', DoGet(_REMOTE, '/instances/%s/content/0010-0010' % j).strip())
 
 
+    def change_patient_id_case_in_patient_keep_source_false(self):
+        UploadInstance(_REMOTE, 'Brainix/Flair/IM-0001-0001.dcm')
+
+        # original PatientID is 5Yp0E, only change the casing of one letter
+        originPatient = DoGet(_REMOTE, '/patients')[0]
+        newPatient = DoPost(_REMOTE, '/patients/%s/modify' % originPatient,
+                            json.dumps({
+                                "Replace": { "PatientID": "5YP0E"},
+                                "Keep": ["StudyInstanceUID", "SeriesInstanceUID", "SOPInstanceUID"],
+                                "Force": True, 
+                                "KeepSource": False
+                            }), 'application/json')['ID']
+
+        self.assertNotEqual(originPatient, newPatient)
+        allStudies = DoGet(_REMOTE, '/studies?expand')
+        self.assertEqual(1, len(allStudies))
+        self.assertEqual('5YP0E', allStudies[0]['PatientMainDicomTags']['PatientID'])
+
+
+    def change_patient_id_case_in_patient_keep_source_true(self):
+        UploadInstance(_REMOTE, 'Brainix/Flair/IM-0001-0001.dcm')
+
+        # original PatientID is 5Yp0E, only change the casing of one letter
+        originPatient = DoGet(_REMOTE, '/patients')[0]
+        newPatient = DoPost(_REMOTE, '/patients/%s/modify' % originPatient,
+                            json.dumps({
+                                "Replace": { "PatientID": "5YP0E"},
+                                "Keep": ["StudyInstanceUID", "SeriesInstanceUID", "SOPInstanceUID"],
+                                "Force": True, 
+                                "KeepSource": True
+                            }), 'application/json')['ID']
+
+        self.assertNotEqual(originPatient, newPatient)
+        self.assertEqual(2, len(DoGet(_REMOTE, '/studies')))
+
+
+    def change_patient_id_case_in_study_keep_source_false(self):
+        UploadInstance(_REMOTE, 'Brainix/Flair/IM-0001-0001.dcm')
+
+        # original PatientID is 5Yp0E, only change the casing of one letter
+        originStudy = DoGet(_REMOTE, '/studies')[0]
+        newStudy = DoPost(_REMOTE, '/studies/%s/modify' % originStudy,
+                            json.dumps({
+                                "Replace": { "PatientID": "5YP0E"},
+                                "Keep": ["StudyInstanceUID", "SeriesInstanceUID", "SOPInstanceUID"],
+                                "Force": True, 
+                                "KeepSource": False
+                            }), 'application/json')['ID']
+
+        self.assertNotEqual(originStudy, newStudy)
+        allStudies = DoGet(_REMOTE, '/studies?expand')
+        self.assertEqual(1, len(allStudies))
+        self.assertEqual('5YP0E', allStudies[0]['PatientMainDicomTags']['PatientID'])
+
+
+    def change_patient_id_case_in_study_keep_source_true(self):
+        UploadInstance(_REMOTE, 'Brainix/Flair/IM-0001-0001.dcm')
+
+        # original PatientID is 5Yp0E, only change the casing of one letter
+        originStudy = DoGet(_REMOTE, '/studies')[0]
+        newStudy = DoPost(_REMOTE, '/studies/%s/modify' % originStudy,
+                            json.dumps({
+                                "Replace": { "PatientID": "5YP0E"},
+                                "Keep": ["StudyInstanceUID", "SeriesInstanceUID", "SOPInstanceUID"],
+                                "Force": True, 
+                                "KeepSource": True
+                            }), 'application/json')['ID']
+
+        self.assertNotEqual(originStudy, newStudy)
+        self.assertEqual(2, len(DoGet(_REMOTE, '/studies')))
+
+
     def test_anonymize_series(self):
         # Upload 4 images from the same series
         for i in range(4):
@@ -1315,11 +1423,16 @@
         series = DoGet(_REMOTE, '/series')[0]
 
         m = DoGet(_REMOTE, '/patients/%s/metadata' % p)
-        if IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9) and HasPostgresIndexPlugin(_REMOTE):
+            self.assertEqual(3, len(m))
+            self.assertTrue('MainDicomTagsSignature' in m)
+            self.assertTrue('PatientRecyclingOrder' in m)
+        elif IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
             self.assertEqual(2, len(m))
             self.assertTrue('MainDicomTagsSignature' in m)
         else:
             self.assertEqual(1, len(m))
+
         self.assertTrue('LastUpdate' in m)
 
         # The lines below failed on Orthanc <= 1.8.2
@@ -1391,7 +1504,11 @@
             self.assertFalse('etag' in headers)
             
         m = DoGet(_REMOTE, '/patients/%s/metadata' % p)
-        if IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9) and HasPostgresIndexPlugin(_REMOTE):
+            self.assertEqual(4, len(m))
+            self.assertTrue('MainDicomTagsSignature' in m)
+            self.assertTrue('PatientRecyclingOrder' in m)
+        elif IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
             self.assertEqual(3, len(m))
             self.assertTrue('MainDicomTagsSignature' in m)
         else:
@@ -1419,7 +1536,11 @@
             DoDelete(_REMOTE, '/patients/%s/metadata/5555' % p)
             
         m = DoGet(_REMOTE, '/patients/%s/metadata' % p)
-        if IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9) and HasPostgresIndexPlugin(_REMOTE):
+            self.assertEqual(3, len(m))
+            self.assertTrue('MainDicomTagsSignature' in m)
+            self.assertTrue('PatientRecyclingOrder' in m)
+        elif IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
             self.assertEqual(2, len(m))
             self.assertTrue('MainDicomTagsSignature' in m)
         else:
@@ -1616,56 +1737,56 @@
         UploadInstance(_REMOTE, 'ColorTestImageJ.dcm')
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', '0010,0010' ])
-        patientNames = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+        patientNames = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(patientNames))
         self.assertTrue('Test Patient BG ' in patientNames)
         self.assertTrue('Anonymized' in patientNames)
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', '0010,0010=*' ])
-        patientNames = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+        patientNames = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(patientNames))
         self.assertTrue('Test Patient BG ' in patientNames)
         self.assertTrue('Anonymized' in patientNames)
 
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', '0008,0021' ])
-        series = re.findall('\(0008,0021\).*?\[\s*(.*?)\s*\]', i)
+        series = re.findall(r'\(0008,0021\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(2, len(series))
         self.assertTrue('20070208' in series)
         self.assertTrue('19980312' in series)
         
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', '0008,0021', '-k', 'Modality=MR\\XA' ])
-        series = re.findall('\(0008,0021\).*?\[\s*(.*?)\s*\]', i)
+        series = re.findall(r'\(0008,0021\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(1, len(series))
         self.assertTrue('19980312' in series)
         
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', 'PatientName=Anonymized' ])
-        series = re.findall('\(0010,0010\).*?\[\s*(.*?)\s*\]', i)
+        series = re.findall(r'\(0010,0010\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(1, len(series))
 
         # Test the "CaseSentitivePN" flag (false by default)
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', 'PatientName=anonymized' ])
-        series = re.findall('\(0010,0010\).*?\[\s*(.*?)\s*\]', i)
+        series = re.findall(r'\(0010,0010\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(1, len(series))
 
         # Test range search (buggy if Orthanc <= 0.9.6)
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'StudyDate=19980312-' ])
-        studies = re.findall('\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
+        studies = re.findall(r'\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(2, len(studies))
         self.assertTrue('20070208' in studies)
         self.assertTrue('19980312' in studies)
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'StudyDate=19980312-19980312' ])
-        studies = re.findall('\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
+        studies = re.findall(r'\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(1, len(studies))
         self.assertTrue('19980312' in studies)
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'StudyDate=-19980312' ])
-        studies = re.findall('\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
+        studies = re.findall(r'\(0008,0020\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(1, len(studies))
         self.assertTrue('19980312' in studies)
 
         # Test that "Retrieve AE Title (0008,0054)" is present, which
         # was *not* the case in Orthanc <= 1.7.2
         i = CallFindScu([ '-k', '0008,0052=INSTANCE' ])
-        instances = re.findall('\(0008,0054\).*?\[\s*(.*?)\s*\]', i)
+        instances = re.findall(r'\(0008,0054\).*?\[\s*(.*?)\s*\]', i)
         self.assertEqual(2, len(instances))
         self.assertEqual('ORTHANC', instances[0].strip())
         self.assertEqual('ORTHANC', instances[1].strip())
@@ -1680,14 +1801,14 @@
 
         # Test returning sequence values (only since Orthanc 0.9.5)
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', '0008,2112' ])  # "ColorTestImageJ" has this sequence tag
-        sequences = re.findall('\(0008,2112\)', i)
+        sequences = re.findall(r'\(0008,2112\)', i)
         self.assertEqual(1, len(sequences))
 
         # Test returning a non-main DICOM tag,
         # "SecondaryCaptureDeviceID" (0018,1010), whose value is
         # "MEDPC" in "ColorTestImageJ.dcm"
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', '0018,1010' ])
-        tags = re.findall('\(0018,1010\).*MEDPC', i)
+        tags = re.findall(r'\(0018,1010\).*MEDPC', i)
         self.assertEqual(1, len(tags))
 
         
@@ -1699,7 +1820,7 @@
         UploadInstance(_REMOTE, 'ColorTestImageJ.dcm')
 
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', '0018,1010=MEDPC' ])
-        sequences = re.findall('\(0018,1010\)', i)
+        sequences = re.findall(r'\(0018,1010\)', i)
         self.assertEqual(1, len(sequences))
 
         
@@ -2715,6 +2836,32 @@
         self.assertEqual(0, len(a))
 
 
+    def test_lookup_find_case_sensitivity(self):
+        UploadInstance(_REMOTE, 'DummyCT.dcm')
+
+        a = DoPost(_REMOTE, '/tools/lookup', 'ozp00SjY2xG')
+        self.assertEqual(1, len(a))
+
+        # the lookup is actually case insensitive (because it looks only in the DicomIdentifiers table that contains only uppercase values)
+        a = DoPost(_REMOTE, '/tools/lookup', 'OZP00SjY2xG')
+        self.assertEqual(1, len(a))
+
+        a = DoPost(_REMOTE, '/tools/find', { 'Level' : 'Patient',
+                                             'CaseSensitive' : True,
+                                             'Query' : { 'PatientID' : 'ozp00SjY2xG' }})
+        self.assertEqual(1, len(a))
+
+        a = DoPost(_REMOTE, '/tools/find', { 'Level' : 'Patient',
+                                             'CaseSensitive' : True,
+                                             'Query' : { 'PatientID' : 'OZP00SjY2xG' }})
+        self.assertEqual(0, len(a))
+
+        a = DoPost(_REMOTE, '/tools/find', { 'Level' : 'Patient',
+                                             'CaseSensitive' : False,
+                                             'Query' : { 'PatientID' : 'OZP00SjY2xG' }})
+        self.assertEqual(1, len(a))
+
+
     def test_autorouting(self):
         knee1 = 'Knee/T1/IM-0001-0001.dcm'
         knee2 = 'Knee/T2/IM-0001-0002.dcm'
@@ -3759,45 +3906,45 @@
         UploadInstance(_REMOTE, 'Comunix/Pet/IM-0001-0002.dcm')
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', 'NumberOfPatientRelatedStudies' ])
-        s = re.findall('\(0020,1200\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1200\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         self.assertTrue('1 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', 'NumberOfPatientRelatedSeries' ])
-        s = re.findall('\(0020,1202\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1202\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         self.assertTrue('2 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', 'NumberOfPatientRelatedInstances' ])
-        s = re.findall('\(0020,1204\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1204\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         self.assertTrue('3 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'NumberOfStudyRelatedSeries' ])
-        s = re.findall('\(0020,1206\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1206\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         self.assertTrue('2 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'NumberOfStudyRelatedInstances' ])
-        s = re.findall('\(0020,1208\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1208\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         self.assertTrue('3 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=SERIES', '-k', 'NumberOfSeriesRelatedInstances' ])
-        s = re.findall('\(0020,1209\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0020,1209\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(s))
         self.assertTrue('1 ' in s)
         self.assertTrue('2 ' in s)
 
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'ModalitiesInStudy' ])
-        s = re.findall('\(0008,0061\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0008,0061\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         t = map(lambda x: x.strip(), s[0].split('\\'))
         self.assertTrue('PT' in t)
         self.assertTrue('CT' in t)
 
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'SOPClassesInStudy' ])
-        s = re.findall('\(0008,0062\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0008,0062\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(s))
         t = map(lambda x: x.strip('\x00'), s[0].split('\\'))
         self.assertTrue('1.2.840.10008.5.1.4.1.1.2' in t)
@@ -4198,11 +4345,11 @@
                               '-k', 'SpecificCharacterSet',  
                               '-k', 'PatientName' ])
 
-            characterSet = re.findall('\(0008,0005\).*?\[(.*?)\]', i)
+            characterSet = re.findall(r'\(0008,0005\).*?\[(.*?)\]', i)
             self.assertEqual(1, len(characterSet))
             self.assertEqual(ENCODINGS[name][0], characterSet[0].strip())
 
-            patientName = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+            patientName = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
             self.assertEqual(1, len(patientName))
 
             expected = TEST.encode(ENCODINGS[name][1], 'ignore')
@@ -4224,15 +4371,15 @@
                                   '-k', 'PatientName' ])
                 i = i.decode(ENCODINGS[master][1])
 
-                characterSet = re.findall('\(0008,0005\).*?\[(.*?)\]', i)
+                characterSet = re.findall(r'\(0008,0005\).*?\[(.*?)\]', i)
                 self.assertEqual(1, len(characterSet))
                 self.assertEqual(ENCODINGS[master][0], characterSet[0].strip())
 
-                patientId = re.findall('\(0010,0020\).*?\[(.*?)\]', i)
+                patientId = re.findall(r'\(0010,0020\).*?\[(.*?)\]', i)
                 self.assertEqual(1, len(patientId))
                 self.assertEqual(ENCODINGS[name][1], patientId[0].strip())
 
-                patientName = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+                patientName = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
                 self.assertEqual(1, len(patientName))
 
                 tmp = ENCODINGS[name][1]
@@ -4659,7 +4806,7 @@
             a = CallFindScu([ '-k', '0008,0005=ISO_IR 192',  # Use UTF-8
                               '-k', '0008,0052=PATIENT',
                               '-k', 'PatientName=%s' % name ])
-            patientNames = re.findall('\(0010,0010\).*?\[(.*?)\]', a)
+            patientNames = re.findall(r'\(0010,0010\).*?\[(.*?)\]', a)
             self.assertEqual(expected, len(patientNames))
 
             a = DoPost(_REMOTE, '/tools/find', { 'Level' : 'Patient',
@@ -5539,13 +5686,13 @@
     def test_invalid_findscp(self):
         UploadInstance(_REMOTE, 'DummyCT.dcm')
         findscu = CallFindScu([ '-S', '-k', '8,52=IMAGE', '-k', '8,16', '-k', '2,2' ])
-        self.assertEqual(0, len(re.findall('\(0002,0002\)', findscu)))
+        self.assertEqual(0, len(re.findall(r'\(0002,0002\)', findscu)))
 
 
     def test_bitbucket_issue_90(self):
         def CountDicomResults(sex):
             a = CallFindScu([ '-S', '-k', '8,52=STUDY', '-k', sex ])
-            return len(re.findall('\(0010,0040\)', a))
+            return len(re.findall(r'\(0010,0040\)', a))
 
         def CountRestResults(sex):
             a = DoPost(_REMOTE, '/tools/find',
@@ -5707,18 +5854,18 @@
         i = CallFindScu([ '-k', '0008,0052=IMAGES', '-k', 'PatientName', '-k', 'Rows', '-k', 'Columns' ])
 
         # We have 2 instances...
-        patientNames = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+        patientNames = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(patientNames))
         self.assertEqual('KNIX', patientNames[0])
         self.assertEqual('KNIX', patientNames[1])
 
-        columns = re.findall('\(0028,0011\) US ([0-9]+)', i)
+        columns = re.findall(r'\(0028,0011\) US ([0-9]+)', i)
         self.assertEqual(2, len(columns))
         self.assertEqual('512', columns[0])
         self.assertEqual('512', columns[1])
         
         # ...but only 1 value for the "Rows" tag
-        rows = re.findall('\(0028,0010\) US ([0-9]+)', i)
+        rows = re.findall(r'\(0028,0010\) US ([0-9]+)', i)
         self.assertEqual(1, len(rows))
         self.assertEqual('512', rows[0])
 
@@ -5798,7 +5945,7 @@
     def test_bitbucket_issue_136(self):
         UploadInstance(_REMOTE, 'Issue137.dcm')
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', '0010,0010', '-k', '0028,0010', '-k', '0040,0275' ])
-        patientNames = re.findall('\(0010,0010\).*?\[(.*?)\]', i)
+        patientNames = re.findall(r'\(0010,0010\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(patientNames))
         self.assertEqual('John Doe', patientNames[0])
 
@@ -6443,7 +6590,7 @@
         i = CallFindScu([ '-k', 'QueryRetrieveLevel=SERIES',
                           '-k', 'StudyInstanceUID=%s' % study,
                           '-k', 'SeriesInstanceUID=%s\\%s' % (series1, series2) ])
-        series = re.findall('\(0020,000e\).*?\[(.*?)\]', i)
+        series = re.findall(r'\(0020,000e\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(series))
         self.assertTrue(series1 in series)
         self.assertTrue(series2 in series)
@@ -7490,14 +7637,14 @@
         study = '1.3.46.670589.7.5.8.80001255161.20000323.151537.1'
         
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', 'StudyInstanceUID' ])
-        result = re.findall('\(0020,000d\).*?\[(.*?)\]', i)
+        result = re.findall(r'\(0020,000d\).*?\[(.*?)\]', i)
         self.assertEqual(2, len(result))
 
         # The "StudyInstanceUID" is set as a list of 5 times the same
         # study, leading to a string of 249 characters
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k',
                           'StudyInstanceUID=%s\\%s\\%s\\%s\\%s' % (( study, ) * 5) ])
-        result = re.findall('\(0020,000d\).*?\[(.*?)\]', i)
+        result = re.findall(r'\(0020,000d\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(result))
         
         # The "StudyInstanceUID" is set as a list of 6 times the same
@@ -7509,7 +7656,7 @@
         # studies (i.e. 2). This issue was fixed in Orthanc 1.7.3.
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k',
                           'StudyInstanceUID=%s\\%s\\%s\\%s\\%s\\%s' % (( study, ) * 6) ])
-        result = re.findall('\(0020,000d\).*?\[(.*?)\]', i)
+        result = re.findall(r'\(0020,000d\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(result))
 
 
@@ -7641,7 +7788,7 @@
         UploadInstance(_REMOTE, 'Comunix/Pet/IM-0001-0001.dcm')
 
         i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', '0020,000d=', '-k', '0008,0061=' ])
-        modalitiesInStudy = re.findall('\(0008,0061\).*?\[(.*?)\]', i)
+        modalitiesInStudy = re.findall(r'\(0008,0061\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(modalitiesInStudy))
         self.assertEqual('CT\\PT ', modalitiesInStudy[0])
         
@@ -7659,7 +7806,7 @@
             self.assertEqual(expected, len(a))
 
             i = CallFindScu([ '-k', '0008,0052=STUDY', '-k', '0020,000d=', '-k', '0008,0061=%s' % i ])
-            studyInstanceUid = re.findall('\(0020,000d\).*?\[(.*?)\]', i)
+            studyInstanceUid = re.findall(r'\(0020,000d\).*?\[(.*?)\]', i)
             self.assertEqual(expected, len(studyInstanceUid))
         
 
@@ -9068,12 +9215,17 @@
         self.assertEqual('Patient', a[0]['Type'])
         self.assertEqual('KNEE', a[0]['MainDicomTags']['PatientName'])
         self.assertTrue('Metadata' in a[0])
-        if IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9) and HasPostgresIndexPlugin(_REMOTE):
+            self.assertEqual(3, len(a[0]['Metadata']))
+            self.assertTrue('MainDicomTagsSignature' in a[0]['Metadata'])
+            self.assertTrue('PatientRecyclingOrder' in a[0]['Metadata'])
+        elif IsOrthancVersionAbove(_REMOTE, 1, 11, 0):
             self.assertEqual(2, len(a[0]['Metadata']))
             self.assertTrue('MainDicomTagsSignature' in a[0]['Metadata'])
         else:
             self.assertEqual(1, len(a[0]['Metadata']))
-            self.assertTrue('LastUpdate' in a[0]['Metadata'])
+
+        self.assertTrue('LastUpdate' in a[0]['Metadata'])
 
         for level in [ 'Instance', 'Series', 'Study', 'Patient' ]:
             a = DoPost(_REMOTE, '/tools/bulk-content', { 'Resources' : [ knee1, brainix ],
@@ -10701,7 +10853,7 @@
 
         i = CallFindScu([ '-k', '0008,0052=PATIENT', '-k', '0008,0000=22' ])  # GE like C-Find that includes group-length
         # print(i)
-        s = re.findall('\(0008,0000\).*?\[(.*?)\]', i)
+        s = re.findall(r'\(0008,0000\).*?\[(.*?)\]', i)
         self.assertEqual(0, len(s))
 
 
@@ -10846,13 +10998,21 @@
 
         a = DoGet(_REMOTE, '/patients?expand')
         self.assertEqual(1, len(a))
-        self.assertEqual(7, len(a[0]))
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            self.assertEqual(8, len(a[0]))
+            self.assertTrue('IsProtected' in a[0])
+        else:
+            self.assertEqual(7, len(a[0]))
         CheckPatientContent(a[0])
         self.assertFalse('RequestedTags' in a[0])
 
         a = DoGet(_REMOTE, '/patients?expand&requestedTags=%s' % requestedTags)
         self.assertEqual(1, len(a))
-        self.assertEqual(8, len(a[0]))
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            self.assertEqual(9, len(a[0]))
+            self.assertTrue('IsProtected' in a[0])
+        else:
+            self.assertEqual(8, len(a[0]))
         CheckPatientContent(a[0])
         CheckRequestedTags(a[0])
 
@@ -10893,12 +11053,20 @@
         CheckRequestedTags(a[0])
 
         a = DoGet(_REMOTE, '/patients/%s' % u['ParentPatient'])
-        self.assertEqual(7, len(a))
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            self.assertEqual(8, len(a))
+            self.assertTrue('IsProtected' in a)
+        else:
+            self.assertEqual(7, len(a))
         CheckPatientContent(a)
         self.assertFalse('RequestedTags' in a)
 
         a = DoGet(_REMOTE, '/patients/%s?requestedTags=%s' % (u['ParentPatient'], requestedTags))
-        self.assertEqual(8, len(a))
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            self.assertEqual(9, len(a))
+            self.assertTrue('IsProtected' in a)
+        else:
+            self.assertEqual(8, len(a))
         CheckPatientContent(a)
         CheckRequestedTags(a)
 
@@ -11045,20 +11213,20 @@
 
         # without requesting PatientComments, we get the computed tags
         i = CallFindScu([ '-k', 'PatientID=WITH_COMMENTS',  '-k', 'QueryRetrieveLevel=Study', '-k', 'ModalitiesInStudy', '-k', 'NumberOfStudyRelatedSeries', '-k', 'NumberOfStudyRelatedInstances' ])
-        modalitiesInStudy = re.findall('\(0008,0061\).*?\[(.*?)\]', i)
+        modalitiesInStudy = re.findall(r'\(0008,0061\).*?\[(.*?)\]', i)
         self.assertEqual(1, len(modalitiesInStudy))
         self.assertEqual('CT', modalitiesInStudy[0])
 
         if IsOrthancVersionAbove(_REMOTE, 1, 12, 5):
             # when requesting PatientComments, with 1.12.4, we did not get the computed tags
             i = CallFindScu([ '-k', 'PatientID=WITH_COMMENTS',  '-k', 'QueryRetrieveLevel=Study', '-k', 'ModalitiesInStudy', '-k', 'NumberOfStudyRelatedSeries', '-k', 'NumberOfStudyRelatedInstances', '-k', 'PatientComments' ])
-            modalitiesInStudy = re.findall('\(0008,0061\).*?\[(.*?)\]', i)
+            modalitiesInStudy = re.findall(r'\(0008,0061\).*?\[(.*?)\]', i)
             self.assertEqual(1, len(modalitiesInStudy))
             self.assertEqual('CT', modalitiesInStudy[0])
-            numberOfStudyRelatedSeries = re.findall('\(0020,1206\).*?\[(.*?)\]', i)
+            numberOfStudyRelatedSeries = re.findall(r'\(0020,1206\).*?\[(.*?)\]', i)
             self.assertEqual(1, len(numberOfStudyRelatedSeries))
             self.assertEqual(1, int(numberOfStudyRelatedSeries[0]))
-            numberOfStudyRelatedInstances = re.findall('\(0020,1208\).*?\[(.*?)\]', i)
+            numberOfStudyRelatedInstances = re.findall(r'\(0020,1208\).*?\[(.*?)\]', i)
             self.assertEqual(1, len(numberOfStudyRelatedInstances))
             self.assertEqual(1, int(numberOfStudyRelatedInstances[0]))
 
@@ -11493,6 +11661,7 @@
             self.assertIn('IsStable', a[0])
             self.assertNotIn('Attachments', a[0])
             self.assertNotIn('Metadata', a[0])
+            self.assertNotIn('IsProtected', a[0])
 
 
             a = DoPost(_REMOTE, '/tools/find', {    'Level' : 'Series',
@@ -11516,6 +11685,7 @@
             self.assertNotIn('IsStable', a[0])
             self.assertNotIn('Attachments', a[0])
             self.assertNotIn('Metadata', a[0])
+            self.assertNotIn('IsProtected', a[0])
 
 
             a = DoPost(_REMOTE, '/tools/find', {    'Level' : 'Series',
@@ -11538,6 +11708,7 @@
             self.assertIn('Status', a[0])
             self.assertIn('IsStable', a[0])
             self.assertNotIn('Attachments', a[0])
+            self.assertNotIn('IsProtected', a[0])
 
 
             a = DoPost(_REMOTE, '/tools/find', {    'Level' : 'Instances',
@@ -11559,6 +11730,7 @@
             self.assertIn('Labels', a[0])
             self.assertNotIn('Attachments', a[0])
             self.assertNotIn('Metadata', a[0])
+            self.assertNotIn('IsProtected', a[0])
 
             a = DoPost(_REMOTE, '/tools/find', {    'Level' : 'Instances',
                                                     'Query' : { 
@@ -11590,6 +11762,18 @@
             self.assertIn('RequestedTags', a[0]) # the RequestedTags are always in the response as soon as you have requested them
             self.assertIn('SOPClassUID', a[0]['RequestedTags'])
 
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 8):
+            a = DoPost(_REMOTE, '/tools/find', {    'Level' : 'Patients',
+                                                    'Query' : { 
+                                                    },
+                                                    'ResponseContent' : ['IsProtected']
+                                                    })
+
+            self.assertIn('ID', a[0])            # the ID is always in the response
+            self.assertIn('Type', a[0])          # the Type is always in the response
+            self.assertIn('IsProtected', a[0])
+
+
 
     def test_extended_find_full(self):
         if IsOrthancVersionAbove(_REMOTE, 1, 12, 5) and HasExtendedFind(_REMOTE):
@@ -12058,3 +12242,57 @@
         self.assertEqual('200', resp['status'])
         self.assertEqual(len(embedded), len(grayscale))
         self.assertEqual(embedded, grayscale)
+
+
+    def test_encodings_iso_ir13(self):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9):
+            # from https://discourse.orthanc-server.org/t/issue-with-special-characters-when-scans-where-uploaded-with-specificcharacterset-dicom-tag-value-as-iso-ir-13/5962
+            instanceId = UploadInstance(_REMOTE, 'Encodings/ISO_IR13.dcm')['ID']
+            tags = DoGet(_REMOTE, '/instances/%s/tags?simplify' % instanceId)
+            self.assertEqual(r'ORIGINAL\PRIMARY\M\NORM\DIS2D\FM\FIL', tags['ImageType'])
+
+
+    def test_jobs_user_data(self):
+        if IsOrthancVersionAbove(_REMOTE, 1, 12, 9):
+            u = UploadInstance(_REMOTE, 'DummyCT.dcm')
+
+            job = DoPost(_REMOTE, '/studies/%s/modify' % u['ParentStudy'],
+                                json.dumps({
+                                    "Replace": {"PatientName": "toto"},
+                                    "UserData": { "user-data": "titi"
+                                                },
+                                    "Asynchronous": True
+                                }))
+            jobDetails = DoGet(_REMOTE, '/jobs/%s' % job['ID'])
+            self.assertEqual('titi', jobDetails['UserData']['user-data'])
+
+            job = DoPost(_REMOTE, '/tools/create-archive',
+                                json.dumps({
+                                    "Resources": [u['ParentStudy']],
+                                    "UserData": "simple-string",
+                                    "Asynchronous": True
+                                }))
+            jobDetails = DoGet(_REMOTE, '/jobs/%s' % job['ID'])
+            self.assertEqual('simple-string', jobDetails['UserData'])
+
+            job = DoPost(_REMOTE, '/modalities/orthanctest/move', { 
+                'Level' : 'Study',
+                'Asynchronous': True,
+                "UserData": "simple-string",
+                'Resources' : [
+                    { 
+                        'StudyInstanceUID' : '1.2.840.113619.2.176.2025.1499492.7391.1171285944.390'
+                    }
+                ]})
+
+            jobDetails = DoGet(_REMOTE, '/jobs/%s' % job['ID'])
+            self.assertEqual('simple-string', jobDetails['UserData'])
+
+            job = DoPost(_REMOTE, '/modalities/orthanctest/store', { 
+                'Level' : 'Study',
+                'Asynchronous': True,
+                "UserData": "simple-string",
+                'Resources' : [u['ParentStudy']]})
+
+            jobDetails = DoGet(_REMOTE, '/jobs/%s' % job['ID'])
+            self.assertEqual('simple-string', jobDetails['UserData'])
--- a/Tests/Toolbox.py	Thu Apr 10 16:33:10 2025 +0200
+++ b/Tests/Toolbox.py	Tue Nov 04 15:52:51 2025 +0100
@@ -479,6 +479,10 @@
     plugins = DoGet(orthanc, '/plugins')
     return ('gdcm' in plugins)
 
+def HasPostgresIndexPlugin(orthanc):
+    plugins = DoGet(orthanc, '/plugins')
+    return ('postgresql-index' in plugins)
+
 
 def _GetMaxImageDifference(im1, im2):
     h = ImageChops.difference(im1, im2).histogram()