/* GLIB - Library of useful routines for C programming
 * Copyright (C) 1995-1997 Peter Mattis, Spencer Kimball and Josh MacDonald
 *
 * This library is free software; you can redistribute it and/or
 * modify it under the terms of the GNU Lesser General Public
 * License as published by the Free Software Foundation; either
 * version 2 of the License, or (at your option) any later version.
 *
 * This library is distributed in the hope that it will be useful,
 * but WITHOUT ANY WARRANTY; without even the implied warranty of
 * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the GNU
 * Lesser General Public License for more details.
 *
 * You should have received a copy of the GNU Lesser General Public
 * License along with this library; if not, write to the
 * Free Software Foundation, Inc., 59 Temple Place - Suite 330,
 * Boston, MA 02111-1307, USA.
 */

/*
 * Modified by the GLib Team and others 1997-2000.  See the AUTHORS
 * file for a list of people on the GLib Team.  See the ChangeLog
 * files for a list of changes.  These files are distributed with
 * GLib at ftp://ftp.gtk.org/pub/gtk/.
 */

/*
 * MT safe
 */

#include "config.h"

#include <stdlib.h>
#include <string.h>
#include <signal.h>

#include "glib.h"

/* notes on macros:
 * if ENABLE_GC_FRIENDLY is defined, freed memory should be 0-wiped.
 */

#define MEM_PROFILE_TABLE_SIZE 4096

#define MEM_AREA_SIZE 4L

static guint mem_chunk_recursion = 0;
# define MEM_CHUNK_ROUTINE_COUNT()      (mem_chunk_recursion)
# define ENTER_MEM_CHUNK_ROUTINE()      (mem_chunk_recursion = MEM_CHUNK_ROUTINE_COUNT () + 1)
# define LEAVE_MEM_CHUNK_ROUTINE()      (mem_chunk_recursion = MEM_CHUNK_ROUTINE_COUNT () - 1)

/* --- old memchunk prototypes --- */
void       old_mem_chunks_init    (void);
GMemChunk* old_mem_chunk_new      (const gchar *name,
                                   gint         atom_size,
                                   gulong       area_size,
                                   gint         type);
void       old_mem_chunk_destroy  (GMemChunk   *mem_chunk);
gpointer   old_mem_chunk_alloc    (GMemChunk   *mem_chunk);
gpointer   old_mem_chunk_alloc0   (GMemChunk   *mem_chunk);
void       old_mem_chunk_free     (GMemChunk   *mem_chunk,
                                   gpointer     mem);
void       old_mem_chunk_clean    (GMemChunk   *mem_chunk);
void       old_mem_chunk_reset    (GMemChunk   *mem_chunk);
void       old_mem_chunk_print    (GMemChunk   *mem_chunk);
void       old_mem_chunk_info     (void);
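
/* Illustrative usage sketch (added comment, not part of the original file):
 * the expected call sequence is to create a chunk sized for one atom type,
 * allocate atoms from it, optionally hand them back (meaningful only for
 * G_ALLOC_AND_FREE chunks), and finally destroy the chunk.  "Foo" below is
 * a made-up placeholder type.
 *
 *   typedef struct { gint a; gpointer b; } Foo;
 *
 *   old_mem_chunks_init ();
 *   GMemChunk *chunk = old_mem_chunk_new ("foo chunk", sizeof (Foo),
 *                                         sizeof (Foo) * 128,
 *                                         G_ALLOC_AND_FREE);
 *   Foo *foo = old_mem_chunk_alloc0 (chunk);
 *   old_mem_chunk_free (chunk, foo);
 *   old_mem_chunk_destroy (chunk);
 */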


/* --- MemChunks --- */
#ifndef G_ALLOC_AND_FREE
typedef struct _GAllocator GAllocator;
typedef struct _GMemChunk  GMemChunk;
#define G_ALLOC_ONLY      1
#define G_ALLOC_AND_FREE  2
#endif

typedef struct _GFreeAtom GFreeAtom;
typedef struct _GMemArea  GMemArea;

struct _GFreeAtom
{
  GFreeAtom *next;
};

struct _GMemArea
{
  GMemArea *next;             /* the next mem area */
  GMemArea *prev;             /* the previous mem area */
  gulong index;               /* the current index into the "mem" array */
  gulong free;                /* the number of free bytes in this mem area */
  gulong allocated;           /* the number of atoms allocated from this area */
  gulong mark;                /* is this mem area marked for deletion */
  gchar mem[MEM_AREA_SIZE];   /* the mem array from which atoms get allocated
                               * the actual size of this array is determined by
                               * the mem chunk "area_size". ANSI says that it
                               * must be declared to be the maximum size it
                               * can possibly be (even though the actual size
                               * may be less).
                               */
};

struct _GMemChunk
{
  const gchar *name;          /* name of this MemChunk...used for debugging output */
  gint type;                  /* the type of MemChunk: ALLOC_ONLY or ALLOC_AND_FREE */
  gint num_mem_areas;         /* the number of memory areas */
  gint num_marked_areas;      /* the number of areas marked for deletion */
  guint atom_size;            /* the size of an atom */
  gulong area_size;           /* the size of a memory area */
  GMemArea *mem_area;         /* the current memory area */
  GMemArea *mem_areas;        /* a list of all the mem areas owned by this chunk */
  GMemArea *free_mem_area;    /* the free area...which is about to be destroyed */
  GFreeAtom *free_atoms;      /* the free atoms list */
  GTree *mem_tree;            /* tree of mem areas sorted by memory address */
  GMemChunk *next;            /* pointer to the next chunk */
  GMemChunk *prev;            /* pointer to the previous chunk */
};
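
/* Overview of the data structures above (descriptive note, added for
 * clarity): a GMemChunk owns a doubly linked list of GMemAreas, carves
 * fixed-size atoms out of each area's "mem" array, threads a GFreeAtom
 * list through atoms returned via old_mem_chunk_free(), and (for
 * G_ALLOC_AND_FREE chunks) keeps a GTree mapping addresses back to the
 * GMemArea they belong to.  All chunks are additionally linked into the
 * global "mem_chunks" list below.
 */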


static gulong old_mem_chunk_compute_size (gulong    size,
                                          gulong    min_size) G_GNUC_CONST;
static gint   old_mem_chunk_area_compare (GMemArea *a,
                                          GMemArea *b);
static gint   old_mem_chunk_area_search  (GMemArea *a,
                                          gchar    *addr);

/* here we can't use StaticMutexes, as they depend upon a working
 * g_malloc; the same holds true for StaticPrivate
 */
static GMutex *mem_chunks_lock = NULL;
static GMemChunk *mem_chunks = NULL;

void
old_mem_chunks_init (void)
{
  mem_chunks_lock = g_mutex_new ();
}

GMemChunk*
old_mem_chunk_new (const gchar *name,
                   gint         atom_size,
                   gulong       area_size,
                   gint         type)
{
  GMemChunk *mem_chunk;
  gulong rarea_size;

  g_return_val_if_fail (atom_size > 0, NULL);
  g_return_val_if_fail (area_size >= atom_size, NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  area_size = (area_size + atom_size - 1) / atom_size;
  area_size *= atom_size;

  mem_chunk = g_new (GMemChunk, 1);
  mem_chunk->name = name;
  mem_chunk->type = type;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->num_marked_areas = 0;
  mem_chunk->mem_area = NULL;
  mem_chunk->free_mem_area = NULL;
  mem_chunk->free_atoms = NULL;
  mem_chunk->mem_tree = NULL;
  mem_chunk->mem_areas = NULL;
  mem_chunk->atom_size = atom_size;

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    mem_chunk->mem_tree = g_tree_new ((GCompareFunc) old_mem_chunk_area_compare);

  if (mem_chunk->atom_size % G_MEM_ALIGN)
    mem_chunk->atom_size += G_MEM_ALIGN - (mem_chunk->atom_size % G_MEM_ALIGN);

  rarea_size = area_size + sizeof (GMemArea) - MEM_AREA_SIZE;
  rarea_size = old_mem_chunk_compute_size (rarea_size, atom_size + sizeof (GMemArea) - MEM_AREA_SIZE);
  mem_chunk->area_size = rarea_size - (sizeof (GMemArea) - MEM_AREA_SIZE);
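
  /* Note (added comment): rarea_size is the size of the whole malloc'ed
   * block, i.e. the GMemArea header plus the atom storage (the header's
   * mem[MEM_AREA_SIZE] placeholder is subtracted so it isn't counted
   * twice).  old_mem_chunk_compute_size() then snaps that total to a
   * power of two, presumably so the block fits the underlying allocator
   * nicely, and the header overhead is subtracted back out to get the
   * usable area_size.
   */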

  g_mutex_lock (mem_chunks_lock);
  mem_chunk->next = mem_chunks;
  mem_chunk->prev = NULL;
  if (mem_chunks)
    mem_chunks->prev = mem_chunk;
  mem_chunks = mem_chunk;
  g_mutex_unlock (mem_chunks_lock);

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem_chunk;
}

void
old_mem_chunk_destroy (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  g_mutex_lock (mem_chunks_lock);
  if (mem_chunk->next)
    mem_chunk->next->prev = mem_chunk->prev;
  if (mem_chunk->prev)
    mem_chunk->prev->next = mem_chunk->next;

  if (mem_chunk == mem_chunks)
    mem_chunks = mem_chunks->next;
  g_mutex_unlock (mem_chunks_lock);

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    g_tree_destroy (mem_chunk->mem_tree);

  g_free (mem_chunk);

  LEAVE_MEM_CHUNK_ROUTINE ();
}

gpointer
old_mem_chunk_alloc (GMemChunk *mem_chunk)
{
  GMemArea *temp_area;
  gpointer mem;

  ENTER_MEM_CHUNK_ROUTINE ();

  g_return_val_if_fail (mem_chunk != NULL, NULL);

  while (mem_chunk->free_atoms)
    {
      /* Get the first piece of memory on the "free_atoms" list.
       * The list node is the atom itself, so we simply advance the
       * "free_atoms" list to its next element and reuse the memory.
       */
      mem = mem_chunk->free_atoms;
      mem_chunk->free_atoms = mem_chunk->free_atoms->next;

      /* Determine which area this piece of memory is allocated from */
      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) old_mem_chunk_area_search,
                                 mem);

      /* If the area has been marked, it is scheduled for destruction.
       * We check whether all of the atoms on the free list that
       * reference this area have been removed; once its free space
       * equals the area size, the whole area is unused.
       * If the area could be freed, we park it in "free_mem_area"
       * instead of freeing it here, so that we don't free a mem area
       * only to allocate a new one a few lines down.
       * If we don't allocate a new area a few lines down, the
       * "free_mem_area" gets freed.
       * If there already is a "free_mem_area", we just free this mem area.
       */
      if (temp_area->mark)
        {
          /* Update the "free" memory available in that area */
          temp_area->free += mem_chunk->atom_size;

          if (temp_area->free == mem_chunk->area_size)
            {
              if (temp_area == mem_chunk->mem_area)
                mem_chunk->mem_area = NULL;

              if (mem_chunk->free_mem_area)
                {
                  mem_chunk->num_mem_areas -= 1;

                  if (temp_area->next)
                    temp_area->next->prev = temp_area->prev;
                  if (temp_area->prev)
                    temp_area->prev->next = temp_area->next;
                  if (temp_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, temp_area);
                  g_free (temp_area);
                }
              else
                mem_chunk->free_mem_area = temp_area;

              mem_chunk->num_marked_areas -= 1;
            }
        }
      else
        {
          /* Update the number of allocated atoms count.
           */
          temp_area->allocated += 1;

          /* The area wasn't marked...return the memory
           */
          goto outa_here;
        }
    }

  /* If there isn't a current mem area or the current mem area is out of space
   * then allocate a new mem area.  We'll first check and see if we can use
   * the "free_mem_area".  Otherwise we'll just malloc the mem area.
   */
  if ((!mem_chunk->mem_area) ||
      ((mem_chunk->mem_area->index + mem_chunk->atom_size) > mem_chunk->area_size))
    {
      if (mem_chunk->free_mem_area)
        {
          mem_chunk->mem_area = mem_chunk->free_mem_area;
          mem_chunk->free_mem_area = NULL;
        }
      else
        {
#ifdef ENABLE_GC_FRIENDLY
          mem_chunk->mem_area = (GMemArea*) g_malloc0 (sizeof (GMemArea) -
                                                       MEM_AREA_SIZE +
                                                       mem_chunk->area_size);
#else /* !ENABLE_GC_FRIENDLY */
          mem_chunk->mem_area = (GMemArea*) g_malloc (sizeof (GMemArea) -
                                                      MEM_AREA_SIZE +
                                                      mem_chunk->area_size);
#endif /* ENABLE_GC_FRIENDLY */

          mem_chunk->num_mem_areas += 1;
          mem_chunk->mem_area->next = mem_chunk->mem_areas;
          mem_chunk->mem_area->prev = NULL;

          if (mem_chunk->mem_areas)
            mem_chunk->mem_areas->prev = mem_chunk->mem_area;
          mem_chunk->mem_areas = mem_chunk->mem_area;

          if (mem_chunk->type == G_ALLOC_AND_FREE)
            g_tree_insert (mem_chunk->mem_tree, mem_chunk->mem_area, mem_chunk->mem_area);
        }

      mem_chunk->mem_area->index = 0;
      mem_chunk->mem_area->free = mem_chunk->area_size;
      mem_chunk->mem_area->allocated = 0;
      mem_chunk->mem_area->mark = 0;
    }

  /* Get the memory and modify the state variables appropriately.
   */
  mem = (gpointer) &mem_chunk->mem_area->mem[mem_chunk->mem_area->index];
  mem_chunk->mem_area->index += mem_chunk->atom_size;
  mem_chunk->mem_area->free -= mem_chunk->atom_size;
  mem_chunk->mem_area->allocated += 1;

outa_here:

  LEAVE_MEM_CHUNK_ROUTINE ();

  return mem;
}

gpointer
old_mem_chunk_alloc0 (GMemChunk *mem_chunk)
{
  gpointer mem;

  mem = old_mem_chunk_alloc (mem_chunk);
  if (mem)
    {
      memset (mem, 0, mem_chunk->atom_size);
    }

  return mem;
}

void
old_mem_chunk_free (GMemChunk *mem_chunk,
                    gpointer   mem)
{
  GMemArea *temp_area;
  GFreeAtom *free_atom;

  g_return_if_fail (mem_chunk != NULL);
  g_return_if_fail (mem != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

#ifdef ENABLE_GC_FRIENDLY
  memset (mem, 0, mem_chunk->atom_size);
#endif /* ENABLE_GC_FRIENDLY */

  /* Don't do anything if this is an ALLOC_ONLY chunk
   */
  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      /* Place the memory on the "free_atoms" list
       */
      free_atom = (GFreeAtom*) mem;
      free_atom->next = mem_chunk->free_atoms;
      mem_chunk->free_atoms = free_atom;

      temp_area = g_tree_search (mem_chunk->mem_tree,
                                 (GCompareFunc) old_mem_chunk_area_search,
                                 mem);

      temp_area->allocated -= 1;

      if (temp_area->allocated == 0)
        {
          temp_area->mark = 1;
          mem_chunk->num_marked_areas += 1;
        }
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

/* This doesn't free the free_area if there is one */
void
old_mem_chunk_clean (GMemChunk *mem_chunk)
{
  GMemArea *mem_area;
  GFreeAtom *prev_free_atom;
  GFreeAtom *temp_free_atom;
  gpointer mem;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  if (mem_chunk->type == G_ALLOC_AND_FREE)
    {
      prev_free_atom = NULL;
      temp_free_atom = mem_chunk->free_atoms;

      while (temp_free_atom)
        {
          mem = (gpointer) temp_free_atom;

          mem_area = g_tree_search (mem_chunk->mem_tree,
                                    (GCompareFunc) old_mem_chunk_area_search,
                                    mem);

          /* If this mem area is marked for destruction, unlink the atom
           * from the free list, credit its size back to the area, and
           * free the area once it is completely unused.
           */
          if (mem_area->mark)
            {
              if (prev_free_atom)
                prev_free_atom->next = temp_free_atom->next;
              else
                mem_chunk->free_atoms = temp_free_atom->next;
              temp_free_atom = temp_free_atom->next;

              mem_area->free += mem_chunk->atom_size;
              if (mem_area->free == mem_chunk->area_size)
                {
                  mem_chunk->num_mem_areas -= 1;
                  mem_chunk->num_marked_areas -= 1;

                  if (mem_area->next)
                    mem_area->next->prev = mem_area->prev;
                  if (mem_area->prev)
                    mem_area->prev->next = mem_area->next;
                  if (mem_area == mem_chunk->mem_areas)
                    mem_chunk->mem_areas = mem_chunk->mem_areas->next;
                  if (mem_area == mem_chunk->mem_area)
                    mem_chunk->mem_area = NULL;

                  if (mem_chunk->type == G_ALLOC_AND_FREE)
                    g_tree_remove (mem_chunk->mem_tree, mem_area);
                  g_free (mem_area);
                }
            }
          else
            {
              prev_free_atom = temp_free_atom;
              temp_free_atom = temp_free_atom->next;
            }
        }
    }
  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
old_mem_chunk_reset (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  GMemArea *temp_area;

  g_return_if_fail (mem_chunk != NULL);

  ENTER_MEM_CHUNK_ROUTINE ();

  mem_areas = mem_chunk->mem_areas;
  mem_chunk->num_mem_areas = 0;
  mem_chunk->mem_areas = NULL;
  mem_chunk->mem_area = NULL;

  while (mem_areas)
    {
      temp_area = mem_areas;
      mem_areas = mem_areas->next;
      g_free (temp_area);
    }

  mem_chunk->free_atoms = NULL;

  if (mem_chunk->mem_tree)
    {
      g_tree_destroy (mem_chunk->mem_tree);
      mem_chunk->mem_tree = g_tree_new ((GCompareFunc) old_mem_chunk_area_compare);
    }

  LEAVE_MEM_CHUNK_ROUTINE ();
}

void
old_mem_chunk_print (GMemChunk *mem_chunk)
{
  GMemArea *mem_areas;
  gulong mem;

  g_return_if_fail (mem_chunk != NULL);

  mem_areas = mem_chunk->mem_areas;
  mem = 0;

  while (mem_areas)
    {
      mem += mem_chunk->area_size - mem_areas->free;
      mem_areas = mem_areas->next;
    }

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO,
         "%s: %ld bytes using %d mem areas",
         mem_chunk->name, mem, mem_chunk->num_mem_areas);
}

void
old_mem_chunk_info (void)
{
  GMemChunk *mem_chunk;
  gint count;

  count = 0;
  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  while (mem_chunk)
    {
      count += 1;
      mem_chunk = mem_chunk->next;
    }
  g_mutex_unlock (mem_chunks_lock);

  g_log (G_LOG_DOMAIN, G_LOG_LEVEL_INFO, "%d mem chunks", count);

  g_mutex_lock (mem_chunks_lock);
  mem_chunk = mem_chunks;
  g_mutex_unlock (mem_chunks_lock);

  while (mem_chunk)
    {
      old_mem_chunk_print ((GMemChunk*) mem_chunk);
      mem_chunk = mem_chunk->next;
    }
}

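/* Added note: this snaps the requested size to a nearby power of two,
 * preferring the next power of two below it when that is closer and still
 * at least min_size, otherwise the next power of two above it.  Worked
 * examples from the code below:
 *   old_mem_chunk_compute_size (1100, 64) == 1024
 *   old_mem_chunk_compute_size (1600, 64) == 2048
 */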
static gulong
old_mem_chunk_compute_size (gulong size,
                            gulong min_size)
{
  gulong power_of_2;
  gulong lower, upper;

  power_of_2 = 16;
  while (power_of_2 < size)
    power_of_2 <<= 1;

  lower = power_of_2 >> 1;
  upper = power_of_2;

  if (size - lower < upper - size && lower >= min_size)
    return lower;
  else
    return upper;
}

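/* GTree helpers (added note): old_mem_chunk_area_compare() orders areas by
 * the address of their "mem" arrays, and old_mem_chunk_area_search() is
 * used with g_tree_search() to find the area whose currently used range
 * [mem, mem + index) contains a given address.
 */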
static gint
old_mem_chunk_area_compare (GMemArea *a,
                            GMemArea *b)
{
  if (a->mem > b->mem)
    return 1;
  else if (a->mem < b->mem)
    return -1;
  return 0;
}

static gint
old_mem_chunk_area_search (GMemArea *a,
                           gchar    *addr)
{
  if (a->mem <= addr)
    {
      if (addr < &a->mem[a->index])
        return 0;
      return 1;
    }
  return -1;
}