comparison OrthancFramework/Sources/Cache/MemoryStringCache.cpp @ 5420:d37dff2c0028 am-new-cache
Optimized the MemoryStringCache to prevent loading the same file multiple times if multiple users request the same file at the same time
author | Alain Mazy <am@osimis.io> |
---|---|
date | Mon, 13 Nov 2023 17:01:59 +0100 |
parents | 0ea402b4d901 |
children | c65e036d649b |
5419:ac4e9fb87615 | 5420:d37dff2c0028 |
---|---|
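For context, the new `MemoryStringCache::Accessor` class introduced in this changeset is meant to be used with a fetch-then-add pattern: the first caller that misses becomes responsible for loading the value and calling `Add()`, while concurrent callers asking for the same key block inside `Fetch()` until that value arrives. Below is a minimal usage sketch, assuming the class lives in the `Orthanc` namespace and is included from `MemoryStringCache.h`; the `ReadFileFromDisk()` helper is hypothetical and only stands in for the slow load being deduplicated.

```cpp
#include "MemoryStringCache.h"

#include <string>

// Hypothetical loader standing in for the expensive operation (e.g. reading
// an attachment from disk) that should not be performed twice for one key.
std::string ReadFileFromDisk(const std::string& key);

std::string GetOrLoad(Orthanc::MemoryStringCache& cache,
                      const std::string& key)
{
  Orthanc::MemoryStringCache::Accessor accessor(cache);

  std::string content;
  if (accessor.Fetch(content, key))
  {
    // Hit: either the value was already cached, or another accessor was
    // loading the same key and this call waited for it to be added.
    return content;
  }

  // Miss: this accessor is now in charge of loading and adding the value.
  // If the load throws before Add() is called, the Accessor destructor
  // removes the key from the "being loaded" list so that waiters are released.
  content = ReadFileFromDisk(key);
  accessor.Add(key, content);  // Wakes up accessors waiting on this key

  return content;
}
```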
51 { | 51 { |
52 return content_.size(); | 52 return content_.size(); |
53 } | 53 } |
54 }; | 54 }; |
55 | 55 |
 | 56 |
 | 57 MemoryStringCache::Accessor::Accessor(MemoryStringCache& cache) |
 | 58 : cache_(cache), |
 | 59 shouldAdd_(false) |
 | 60 { |
 | 61 } |
 | 62 |
 | 63 |
 | 64 MemoryStringCache::Accessor::~Accessor() |
 | 65 { |
 | 66 // if this accessor was the one in charge of loading and adding the data into the cache |
 | 67 // and it failed to add, remove the key from the list to make sure other accessors |
 | 68 // stop waiting for it. |
 | 69 if (shouldAdd_) |
 | 70 { |
 | 71 cache_.RemoveFromItemsBeingLoaded(keyToAdd_); |
 | 72 } |
 | 73 } |
 | 74 |
 | 75 |
 | 76 bool MemoryStringCache::Accessor::Fetch(std::string& value, const std::string& key) |
 | 77 { |
 | 78 // if multiple accessors are fetching at the same time: |
 | 79 // the first one will return false and will be in charge of adding to the cache. |
 | 80 // others will wait. |
 | 81 // if the first one fails to add, or if the content was too large to fit in the cache, |
 | 82 // the next one will be in charge of adding ... |
 | 83 if (!cache_.Fetch(value, key)) |
 | 84 { |
 | 85 shouldAdd_ = true; |
 | 86 keyToAdd_ = key; |
 | 87 return false; |
 | 88 } |
 | 89 |
 | 90 shouldAdd_ = false; |
 | 91 keyToAdd_.clear(); |
 | 92 |
 | 93 return true; |
 | 94 } |
 | 95 |
 | 96 |
 | 97 void MemoryStringCache::Accessor::Add(const std::string& key, const std::string& value) |
 | 98 { |
 | 99 cache_.Add(key, value); |
 | 100 shouldAdd_ = false; |
 | 101 } |
 | 102 |
 | 103 |
 | 104 void MemoryStringCache::Accessor::Add(const std::string& key, const char* buffer, size_t size) |
 | 105 { |
 | 106 cache_.Add(key, buffer, size); |
 | 107 shouldAdd_ = false; |
 | 108 } |
 | 109 |
 | 110 |
 | 111 MemoryStringCache::MemoryStringCache() : |
 | 112 currentSize_(0), |
 | 113 maxSize_(100 * 1024 * 1024) // 100 MB |
 | 114 { |
 | 115 } |
 | 116 |
 | 117 |
 | 118 MemoryStringCache::~MemoryStringCache() |
 | 119 { |
 | 120 Recycle(0); |
 | 121 assert(content_.IsEmpty()); |
 | 122 } |
 | 123 |
 | 124 |
56 size_t MemoryStringCache::GetMaximumSize() | 125 size_t MemoryStringCache::GetMaximumSize() |
57 { | 126 { |
58 return cache_.GetMaximumSize(); | 127 return maxSize_; |
59 } | 128 } |
 | 129 |
60 | 130 |
61 void MemoryStringCache::SetMaximumSize(size_t size) | 131 void MemoryStringCache::SetMaximumSize(size_t size) |
62 { | 132 { |
63 cache_.SetMaximumSize(size); | 133 if (size == 0) |
64 } | 134 { |
 | 135 throw OrthancException(ErrorCode_ParameterOutOfRange); |
 | 136 } |
 | 137 |
 | 138 // // Make sure no accessor is currently open (as its data may be |
 | 139 // // removed if recycling is needed) |
 | 140 // WriterLock contentLock(contentMutex_); |
 | 141 |
 | 142 // Lock the global structure of the cache |
 | 143 boost::mutex::scoped_lock cacheLock(cacheMutex_); |
 | 144 |
 | 145 Recycle(size); |
 | 146 maxSize_ = size; |
 | 147 } |
 | 148 |
65 | 149 |
66 void MemoryStringCache::Add(const std::string& key, | 150 void MemoryStringCache::Add(const std::string& key, |
67 const std::string& value) | 151 const std::string& value) |
68 { | 152 { |
69 cache_.Acquire(key, new StringValue(value)); | 153 std::unique_ptr<StringValue> item(new StringValue(value)); |
70 } | 154 size_t size = value.size(); |
 | 155 |
 | 156 boost::mutex::scoped_lock cacheLock(cacheMutex_); |
 | 157 |
 | 158 if (size > maxSize_) |
 | 159 { |
 | 160 // This object is too large to be stored in the cache, discard it |
 | 161 } |
 | 162 else if (content_.Contains(key)) |
 | 163 { |
 | 164 // Value already stored, don't overwrite the old value but put it on top of the cache |
 | 165 content_.MakeMostRecent(key); |
 | 166 } |
 | 167 else |
 | 168 { |
 | 169 Recycle(maxSize_ - size); // Post-condition: currentSize_ <= maxSize_ - size |
 | 170 assert(currentSize_ + size <= maxSize_); |
 | 171 |
 | 172 content_.Add(key, item.release()); |
 | 173 currentSize_ += size; |
 | 174 } |
 | 175 |
 | 176 RemoveFromItemsBeingLoadedInternal(key); |
 | 177 } |
 | 178 |
71 | 179 |
72 void MemoryStringCache::Add(const std::string& key, | 180 void MemoryStringCache::Add(const std::string& key, |
73 const void* buffer, | 181 const void* buffer, |
74 size_t size) | 182 size_t size) |
75 { | 183 { |
76 cache_.Acquire(key, new StringValue(reinterpret_cast<const char*>(buffer), size)); | 184 Add(key, std::string(reinterpret_cast<const char*>(buffer), size)); |
77 } | 185 } |
 | 186 |
78 | 187 |
79 void MemoryStringCache::Invalidate(const std::string &key) | 188 void MemoryStringCache::Invalidate(const std::string &key) |
80 { | 189 { |
81 cache_.Invalidate(key); | 190 boost::mutex::scoped_lock cacheLock(cacheMutex_); |
82 } | 191 |
83 | 192 StringValue* item = NULL; |
 | 193 if (content_.Contains(key, item)) |
 | 194 { |
 | 195 assert(item != NULL); |
 | 196 const size_t size = item->GetMemoryUsage(); |
 | 197 delete item; |
 | 198 |
 | 199 content_.Invalidate(key); |
 | 200 |
 | 201 assert(currentSize_ >= size); |
 | 202 currentSize_ -= size; |
 | 203 } |
 | 204 |
 | 205 RemoveFromItemsBeingLoadedInternal(key); |
 | 206 } |
 | 207 |
 | 208 |
84 bool MemoryStringCache::Fetch(std::string& value, | 209 bool MemoryStringCache::Fetch(std::string& value, |
85 const std::string& key) | 210 const std::string& key) |
86 { | 211 { |
87 MemoryObjectCache::Accessor reader(cache_, key, false /* multiple readers are allowed */); | 212 boost::mutex::scoped_lock cacheLock(cacheMutex_); |
88 | 213 |
89 if (reader.IsValid()) | 214 StringValue* item; |
90 { | 215 |
91 value = dynamic_cast<StringValue&>(reader.GetValue()).GetContent(); | 216 // if another client is currently loading the item, wait for it. |
 | 217 while (itemsBeingLoaded_.find(key) != itemsBeingLoaded_.end() && !content_.Contains(key, item)) |
 | 218 { |
 | 219 cacheCond_.wait(cacheLock); |
 | 220 } |
 | 221 |
 | 222 if (content_.Contains(key, item)) |
 | 223 { |
 | 224 value = dynamic_cast<StringValue&>(*item).GetContent(); |
 | 225 content_.MakeMostRecent(key); |
 | 226 |
92 return true; | 227 return true; |
93 } | 228 } |
94 else | 229 else |
95 { | 230 { |
 | 231 // note that this accessor will be in charge of loading and adding. |
 | 232 itemsBeingLoaded_.insert(key); |
96 return false; | 233 return false; |
97 } | 234 } |
98 } | 235 } |
 | 236 |
 | 237 |
 | 238 void MemoryStringCache::RemoveFromItemsBeingLoaded(const std::string& key) |
 | 239 { |
 | 240 boost::mutex::scoped_lock cacheLock(cacheMutex_); |
 | 241 RemoveFromItemsBeingLoadedInternal(key); |
 | 242 } |
 | 243 |
 | 244 |
 | 245 void MemoryStringCache::RemoveFromItemsBeingLoadedInternal(const std::string& key) |
 | 246 { |
 | 247 // notify all waiting users, some of them potentially waiting for this item |
 | 248 itemsBeingLoaded_.erase(key); |
 | 249 cacheCond_.notify_all(); |
 | 250 } |
 | 251 |
 | 252 void MemoryStringCache::Recycle(size_t targetSize) |
 | 253 { |
 | 254 // WARNING: "cacheMutex_" must be locked |
 | 255 while (currentSize_ > targetSize) |
 | 256 { |
 | 257 assert(!content_.IsEmpty()); |
 | 258 |
 | 259 StringValue* item = NULL; |
 | 260 content_.RemoveOldest(item); |
 | 261 |
 | 262 assert(item != NULL); |
 | 263 const size_t size = item->GetMemoryUsage(); |
 | 264 delete item; |
 | 265 |
 | 266 assert(currentSize_ >= size); |
 | 267 currentSize_ -= size; |
 | 268 } |
 | 269 |
 | 270 // Post-condition: "currentSize_ <= targetSize" |
 | 271 |
 | 272 } |
99 } | 273 } |
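The synchronization introduced by `Fetch()`, `Add()` and `RemoveFromItemsBeingLoadedInternal()` boils down to a "single loader per key" pattern: a cache miss registers the key in `itemsBeingLoaded_`, and later callers wait on the condition variable until the value is added or the load is abandoned. The following standalone sketch illustrates that pattern with standard C++ primitives; it is an illustration only, not Orthanc code, and it omits the LRU recycling and size accounting shown in the changeset above.

```cpp
#include <condition_variable>
#include <map>
#include <mutex>
#include <set>
#include <string>

class SingleLoaderMap
{
private:
  std::mutex mutex_;
  std::condition_variable loaded_;
  std::map<std::string, std::string> content_;
  std::set<std::string> beingLoaded_;

public:
  // Returns true and fills "value" on a hit (possibly after waiting for the
  // thread that is currently loading "key"); returns false on a miss, in
  // which case the caller becomes responsible for calling Add() or Abort().
  bool Fetch(std::string& value, const std::string& key)
  {
    std::unique_lock<std::mutex> lock(mutex_);

    while (beingLoaded_.count(key) != 0 && content_.count(key) == 0)
    {
      loaded_.wait(lock);
    }

    auto found = content_.find(key);
    if (found != content_.end())
    {
      value = found->second;
      return true;
    }

    beingLoaded_.insert(key);  // This caller is now the loader for "key"
    return false;
  }

  void Add(const std::string& key, const std::string& value)
  {
    std::lock_guard<std::mutex> lock(mutex_);
    content_[key] = value;
    beingLoaded_.erase(key);
    loaded_.notify_all();  // Release every thread waiting in Fetch()
  }

  void Abort(const std::string& key)
  {
    std::lock_guard<std::mutex> lock(mutex_);
    beingLoaded_.erase(key);
    loaded_.notify_all();  // Let another waiter take over the load
  }
};
```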