Blender V4.3
MEM_CacheLimiter.h
Go to the documentation of this file.
1/* SPDX-FileCopyrightText: 2006-2022 Blender Authors
2 *
3 * SPDX-License-Identifier: GPL-2.0-or-later */
4
9#ifndef __MEM_CACHELIMITER_H__
10#define __MEM_CACHELIMITER_H__
11
44#include "MEM_Allocator.h"
45#include <list>
46#include <queue>
47#include <vector>
48
49template<class T> class MEM_CacheLimiter;
50
#ifndef __MEM_CACHELIMITERC_API_H__
extern "C" {
/* Global cache-limit configuration, shared with the C API
 * (implemented in MEM_CacheLimiterC-Api.cpp). */

/** Set the global maximum amount of cached memory, in bytes (0 disables enforcement). */
void MEM_CacheLimiter_set_maximum(size_t m);
/** Get the global maximum amount of cached memory, in bytes. */
size_t MEM_CacheLimiter_get_maximum(void);
/** Globally enable/disable cache limit enforcement. */
void MEM_CacheLimiter_set_disabled(bool disabled);
/** True when cache limit enforcement is globally disabled. */
bool MEM_CacheLimiter_is_disabled(void);
}
#endif
59
60template<class T> class MEM_CacheLimiterHandle {
61 public:
62 explicit MEM_CacheLimiterHandle(T *data_, MEM_CacheLimiter<T> *parent_)
63 : data(data_), refcount(0), parent(parent_)
64 {
65 }
66
67 void ref()
68 {
69 refcount++;
70 }
71
72 void unref()
73 {
74 refcount--;
75 }
76
77 T *get()
78 {
79 return data;
80 }
81
82 const T *get() const
83 {
84 return data;
85 }
86
87 int get_refcount() const
88 {
89 return refcount;
90 }
91
92 bool can_destroy() const
93 {
94 return !data || !refcount;
95 }
96
98 {
99 if (can_destroy()) {
100 delete data;
101 data = NULL;
102 unmanage();
103 return true;
104 }
105 return false;
106 }
107
108 void unmanage()
109 {
110 parent->unmanage(this);
111 }
112
113 void touch()
114 {
115 parent->touch(this);
116 }
117
118 private:
119 friend class MEM_CacheLimiter<T>;
120
121 T *data;
122 int refcount;
123 int pos;
124 MEM_CacheLimiter<T> *parent;
125};
126
127template<class T> class MEM_CacheLimiter {
128 public:
129 typedef size_t (*MEM_CacheLimiter_DataSize_Func)(void *data);
130 typedef int (*MEM_CacheLimiter_ItemPriority_Func)(void *item, int default_priority);
131 typedef bool (*MEM_CacheLimiter_ItemDestroyable_Func)(void *item);
132
133 MEM_CacheLimiter(MEM_CacheLimiter_DataSize_Func data_size_func) : data_size_func(data_size_func)
134 {
135 }
136
138 {
139 int i;
140 for (i = 0; i < queue.size(); i++) {
141 delete queue[i];
142 }
143 }
144
146 {
147 queue.push_back(new MEM_CacheLimiterHandle<T>(elem, this));
148 queue.back()->pos = queue.size() - 1;
149 return queue.back();
150 }
151
153 {
154 int pos = handle->pos;
155 queue[pos] = queue.back();
156 queue[pos]->pos = pos;
157 queue.pop_back();
158 delete handle;
159 }
160
162 {
163 size_t size = 0;
164 if (data_size_func) {
165 int i;
166 for (i = 0; i < queue.size(); i++) {
167 size += data_size_func(queue[i]->get()->get_data());
168 }
169 }
170 else {
171 size = MEM_get_memory_in_use();
172 }
173 return size;
174 }
175
177 {
178 size_t max = MEM_CacheLimiter_get_maximum();
180 size_t mem_in_use, cur_size;
181
182 if (is_disabled) {
183 return;
184 }
185
186 if (max == 0) {
187 return;
188 }
189
191
192 if (mem_in_use <= max) {
193 return;
194 }
195
196 while (!queue.empty() && mem_in_use > max) {
197 MEM_CacheElementPtr elem = get_least_priority_destroyable_element();
198
199 if (!elem) {
200 break;
201 }
202
203 if (data_size_func) {
204 cur_size = data_size_func(elem->get()->get_data());
205 }
206 else {
207 cur_size = mem_in_use;
208 }
209
210 if (elem->destroy_if_possible()) {
211 if (data_size_func) {
212 mem_in_use -= cur_size;
213 }
214 else {
215 mem_in_use -= cur_size - MEM_get_memory_in_use();
216 }
217 }
218 }
219 }
220
222 {
223 /* If we're using custom priority callback re-arranging the queue
224 * doesn't make much sense because we'll iterate it all to get
225 * least priority element anyway.
226 */
227 if (item_priority_func == NULL) {
228 queue[handle->pos] = queue.back();
229 queue[handle->pos]->pos = handle->pos;
230 queue.pop_back();
231 queue.push_back(handle);
232 handle->pos = queue.size() - 1;
233 }
234 }
235
237 {
238 this->item_priority_func = item_priority_func;
239 }
240
242 {
243 this->item_destroyable_func = item_destroyable_func;
244 }
245
246 private:
247 typedef MEM_CacheLimiterHandle<T> *MEM_CacheElementPtr;
248 typedef std::vector<MEM_CacheElementPtr, MEM_Allocator<MEM_CacheElementPtr>> MEM_CacheQueue;
249 typedef typename MEM_CacheQueue::iterator iterator;
250
251 /* Check whether element can be destroyed when enforcing cache limits */
252 bool can_destroy_element(MEM_CacheElementPtr &elem)
253 {
254 if (!elem->can_destroy()) {
255 /* Element is referenced */
256 return false;
257 }
258 if (item_destroyable_func) {
259 if (!item_destroyable_func(elem->get()->get_data())) {
260 return false;
261 }
262 }
263 return true;
264 }
265
266 MEM_CacheElementPtr get_least_priority_destroyable_element(void)
267 {
268 if (queue.empty()) {
269 return NULL;
270 }
271
272 MEM_CacheElementPtr best_match_elem = NULL;
273
274 if (!item_priority_func) {
275 for (iterator it = queue.begin(); it != queue.end(); it++) {
276 MEM_CacheElementPtr elem = *it;
277 if (!can_destroy_element(elem)) {
278 continue;
279 }
280 best_match_elem = elem;
281 break;
282 }
283 }
284 else {
285 int best_match_priority = 0;
286 int i;
287
288 for (i = 0; i < queue.size(); i++) {
289 MEM_CacheElementPtr elem = queue[i];
290
291 if (!can_destroy_element(elem)) {
292 continue;
293 }
294
295 /* By default 0 means highest priority element. */
296 /* Casting a size type to int is questionable,
297 * but unlikely to cause problems. */
298 int priority = -((int)(queue.size()) - i - 1);
299 priority = item_priority_func(elem->get()->get_data(), priority);
300
301 if (priority < best_match_priority || best_match_elem == NULL) {
302 best_match_priority = priority;
303 best_match_elem = elem;
304 }
305 }
306 }
307
308 return best_match_elem;
309 }
310
311 MEM_CacheQueue queue;
312 MEM_CacheLimiter_DataSize_Func data_size_func;
313 MEM_CacheLimiter_ItemPriority_Func item_priority_func;
314 MEM_CacheLimiter_ItemDestroyable_Func item_destroyable_func;
315};
316
317#endif // __MEM_CACHELIMITER_H__
static bool is_disabled
void MEM_CacheLimiter_set_maximum(size_t m)
void MEM_CacheLimiter_set_disabled(bool disabled)
size_t MEM_CacheLimiter_get_maximum()
bool MEM_CacheLimiter_is_disabled(void)
static DBVT_INLINE btScalar size(const btDbvtVolume &a)
Definition btDbvt.cpp:52
MEM_CacheLimiterHandle(T *data_, MEM_CacheLimiter< T > *parent_)
const T * get() const
void unmanage(MEM_CacheLimiterHandle< T > *handle)
int(* MEM_CacheLimiter_ItemPriority_Func)(void *item, int default_priority)
bool(* MEM_CacheLimiter_ItemDestroyable_Func)(void *item)
size_t(* MEM_CacheLimiter_DataSize_Func)(void *data)
MEM_CacheLimiter(MEM_CacheLimiter_DataSize_Func data_size_func)
void set_item_destroyable_func(MEM_CacheLimiter_ItemDestroyable_Func item_destroyable_func)
void touch(MEM_CacheLimiterHandle< T > *handle)
void set_item_priority_func(MEM_CacheLimiter_ItemPriority_Func item_priority_func)
MEM_CacheLimiterHandle< T > * insert(T *elem)
#define NULL
draw_view push_constant(Type::INT, "radiance_src") .push_constant(Type capture_info_buf storage_buf(1, Qualifier::READ, "ObjectBounds", "bounds_buf[]") .push_constant(Type draw_view int
size_t(* MEM_get_memory_in_use)(void)
Definition mallocn.cc:62
static size_t mem_in_use