1 /*
2 * Win32 heap functions
3 *
4 * Copyright 1996 Alexandre Julliard
5 * Copyright 1998 Ulrich Weigand
6 */
7
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 #include "rtl.h"
16
17 #define NDEBUG
18 #include <debug.h>
19
20 #define WARN_ON(x) (1)
21
22 #ifdef NDEBUG
23 #define TRACE_ON(x) (0)
24 #else
25 #define TRACE_ON(x) (1)
26 #endif
27
28 static RTL_CRITICAL_SECTION RtlpProcessHeapsListLock;
29
30
31 typedef struct tagARENA_INUSE
32 {
33 ULONG size; /* Block size; must be the first field */
34 USHORT threadId; /* Allocating thread id */
35 USHORT magic; /* Magic number */
36 }
37 ARENA_INUSE;
38
39 typedef struct tagARENA_FREE
40 {
41 ULONG size; /* Block size; must be the first field */
42 USHORT threadId; /* Freeing thread id */
43 USHORT magic; /* Magic number */
44 struct tagARENA_FREE *next; /* Next free arena */
45 struct tagARENA_FREE *prev; /* Prev free arena */
46 }
47 ARENA_FREE;
48
49 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
50 #define ARENA_FLAG_PREV_FREE 0x00000002
51 #define ARENA_SIZE_MASK 0xfffffffc
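/* Arena 'size' fields keep the flags above in their two low bits, so block
 * sizes are stored as multiples of 4 and ARENA_SIZE_MASK recovers the plain
 * size; e.g. (0x48 | ARENA_FLAG_FREE) describes a free block whose data area
 * is 0x48 bytes. When ARENA_FLAG_PREV_FREE is set, the ULONG just before the
 * arena holds a pointer back to the preceding free arena (see
 * HEAP_CreateFreeBlock). */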
52 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
53 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
54
55 #define ARENA_INUSE_FILLER 0x55
56 #define ARENA_FREE_FILLER 0xaa
57
58 #define QUIET 1 /* Suppress messages */
59 #define NOISY 0 /* Report all errors */
60
61 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
62
63 /* Max size of the blocks on the free lists */
64 static const ULONG HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
65 {
66 0x20, 0x80, 0x200, 0xffffffff
67 };
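/* Example: a free block of 0x30 bytes is linked on the 0x80 list (the first
 * list whose limit is >= the block size); HEAP_FindFreeBlock starts its scan
 * at that same list and follows the circular chain on through the larger
 * lists. */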
68
69 typedef struct
70 {
71 ULONG size;
72 ARENA_FREE arena;
73 }
74 FREE_LIST_ENTRY;
75
76 struct tagHEAP;
77
78 typedef struct tagSUBHEAP
79 {
80 ULONG size; /* Size of the whole sub-heap */
81 ULONG commitSize; /* Committed size of the sub-heap */
82 ULONG headerSize; /* Size of the heap header */
83 struct tagSUBHEAP *next; /* Next sub-heap */
84 struct tagHEAP *heap; /* Main heap structure */
85 ULONG magic; /* Magic number */
86 }
87 SUBHEAP, *PSUBHEAP;
88
89 #define SUBHEAP_MAGIC ((ULONG)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
90
91 typedef struct tagHEAP
92 {
93 SUBHEAP subheap; /* First sub-heap */
94 struct tagHEAP *next; /* Next heap for this process */
95 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
96 RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
97 ULONG flags; /* Heap flags */
98 ULONG magic; /* Magic number */
99 UCHAR filler[4]; /* Make multiple of 8 bytes */
100 }
101 HEAP, *PHEAP;
102
103 #define HEAP_MAGIC ((ULONG)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
104
105 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
106 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
107 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
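/* With COMMIT_MASK 0xffff, HEAP_Commit/HEAP_Decommit work in 64KB granules:
 * e.g. committing up to an address 0x1840 bytes into a sub-heap rounds the
 * committed size up to 0x10000. HEAP_MIN_BLOCK_SIZE comes to 24 bytes here
 * (16-byte ARENA_FREE header + 8), assuming the usual 32-bit field sizes. */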
108
109
110 static BOOLEAN HEAP_IsRealArena( HANDLE heap, ULONG flags, PVOID block, BOOLEAN quiet );
111
112
113 /***********************************************************************
114 * HEAP_Dump
115 */
116 VOID
117 HEAP_Dump(PHEAP heap)
118 {
119 int i;
120 SUBHEAP *subheap;
121 char *ptr;
122
123 DPRINT( "Heap: %p\n", heap );
124 DPRINT( "Next: %p Sub-heaps: %p",
125 heap->next, &heap->subheap );
126 subheap = &heap->subheap;
127 while (subheap->next)
128 {
129 DPRINT( " -> %p", subheap->next );
130 subheap = subheap->next;
131 }
132
133 DPRINT( "\nFree lists:\n Block Stat Size Id\n" );
134 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
135 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
136 &heap->freeList[i].arena,
137 heap->freeList[i].arena.size,
138 heap->freeList[i].arena.threadId,
139 heap->freeList[i].arena.prev,
140 heap->freeList[i].arena.next );
141
142 subheap = &heap->subheap;
143 while (subheap)
144 {
145 ULONG freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
146 DPRINT( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
147 subheap, subheap->size, subheap->commitSize );
148
149 DPRINT( "\n Block Stat Size Id\n" );
150 ptr = (char*)subheap + subheap->headerSize;
151 while (ptr < (char *)subheap + subheap->size)
152 {
153 if (*(PULONG)ptr & ARENA_FLAG_FREE)
154 {
155 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
156 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
157 pArena, pArena->size & ARENA_SIZE_MASK,
158 pArena->threadId, pArena->prev,
159 pArena->next);
160 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
161 arenaSize += sizeof(ARENA_FREE);
162 freeSize += pArena->size & ARENA_SIZE_MASK;
163 }
164 else if (*(PULONG)ptr & ARENA_FLAG_PREV_FREE)
165 {
166 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
167 DPRINT( "%p Used %08lx %04x back=%08lx\n",
168 pArena, pArena->size & ARENA_SIZE_MASK,
169 pArena->threadId, *((PULONG)pArena - 1));
170 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
171 arenaSize += sizeof(ARENA_INUSE);
172 usedSize += pArena->size & ARENA_SIZE_MASK;
173 }
174 else
175 {
176 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
177 DPRINT( "%p used %08lx %04x\n",
178 pArena, pArena->size & ARENA_SIZE_MASK,
179 pArena->threadId);
180 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
181 arenaSize += sizeof(ARENA_INUSE);
182 usedSize += pArena->size & ARENA_SIZE_MASK;
183 }
184 }
185 DPRINT( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
186 subheap->size, subheap->commitSize, freeSize, usedSize,
187 arenaSize, (arenaSize * 100) / subheap->size );
188 subheap = subheap->next;
189 }
190 }
191
192
193 /***********************************************************************
194 * HEAP_GetPtr
195 * RETURNS
196 * Pointer to the heap
197 * NULL: Failure
198 */
199 static PHEAP
200 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
201 {
202 HEAP *heapPtr = (HEAP *)heap;
203 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
204 {
205 DPRINT("Invalid heap %08x!\n", heap );
206 return NULL;
207 }
208 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
209 {
210 HEAP_Dump( heapPtr );
211 ASSERT( FALSE );
212 return NULL;
213 }
214 return heapPtr;
215 }
216
217
218 /***********************************************************************
219 * HEAP_InsertFreeBlock
220 *
221 * Insert a free block into the free list.
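 * 'last' selects tail insertion; HEAP_CreateFreeBlock passes TRUE for the
 * block at the very end of a sub-heap, so searches reach that block after
 * the other entries in its size class.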
222 */
223 static VOID
224 HEAP_InsertFreeBlock(PHEAP heap,
225 ARENA_FREE *pArena,
226 BOOLEAN last)
227 {
228 FREE_LIST_ENTRY *pEntry = heap->freeList;
229 while (pEntry->size < pArena->size)
230 pEntry++;
231 if (last)
232 {
233 /* insert at end of free list, i.e. before next free list entry */
234 pEntry++;
235 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
236 {
237 pEntry = heap->freeList;
238 }
239 pArena->prev = pEntry->arena.prev;
240 pArena->prev->next = pArena;
241 pArena->next = &pEntry->arena;
242 pEntry->arena.prev = pArena;
243 }
244 else
245 {
246 /* insert at head of free list */
247 pArena->next = pEntry->arena.next;
248 pArena->next->prev = pArena;
249 pArena->prev = &pEntry->arena;
250 pEntry->arena.next = pArena;
251 }
252 pArena->size |= ARENA_FLAG_FREE;
253 }
254
255
256 /***********************************************************************
257 * HEAP_FindSubHeap
258 * Find the sub-heap containing a given address.
259 *
260 * RETURNS
261 * Pointer: Success
262 * NULL: Failure
263 */
264 static PSUBHEAP
265 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
266 PVOID ptr) /* [in] Address */
267 {
268 PSUBHEAP sub = &heap->subheap;
269 while (sub)
270 {
271 if (((char *)ptr >= (char *)sub) &&
272 ((char *)ptr < (char *)sub + sub->size))
273 return sub;
274 sub = sub->next;
275 }
276 return NULL;
277 }
278
279
280 /***********************************************************************
281 * HEAP_Commit
282 *
283 * Make sure the heap storage is committed up to (not including) ptr.
284 */
285 static inline BOOLEAN
286 HEAP_Commit(SUBHEAP *subheap,
287 PVOID ptr,
288 ULONG flags)
289 {
290 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
291 NTSTATUS Status;
292 PVOID address;
293 ULONG commitsize;
294
295 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
296 if (size > subheap->size)
297 size = subheap->size;
298 if (size <= subheap->commitSize)
299 return TRUE;
300
301 address = (PVOID)((char *)subheap + subheap->commitSize);
302 commitsize = size - subheap->commitSize;
303
304 Status = NtAllocateVirtualMemory(NtCurrentProcess(),
305 &address,
306 0,
307 &commitsize,
308 MEM_COMMIT,
309 PAGE_EXECUTE_READWRITE);
310 if (!NT_SUCCESS(Status))
311 {
312 DPRINT( "Could not commit %08lx bytes at %p for heap %p\n",
313 size - subheap->commitSize,
314 ((char *)subheap + subheap->commitSize),
315 subheap->heap );
316 return FALSE;
317 }
318
319 subheap->commitSize += commitsize;
320 return TRUE;
321 }
322
323
324 /***********************************************************************
325 * HEAP_Decommit
326 *
327 * If possible, decommit the heap storage from (including) 'ptr'.
328 */
329 static inline BOOLEAN HEAP_Decommit( SUBHEAP *subheap, PVOID ptr, ULONG flags )
330 {
331 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
332 PVOID address;
333 ULONG decommitsize;
334 NTSTATUS Status;
335 /* round to next block and add one full block */
336 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
337 if (size >= subheap->commitSize)
338 return TRUE;
339
340 address = (PVOID)((char *)subheap + size);
341 decommitsize = subheap->commitSize - size;
342
343 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
344 &address,
345 &decommitsize,
346 MEM_DECOMMIT);
347 if (!NT_SUCCESS(Status))
348 {
349 DPRINT( "Could not decommit %08lx bytes at %p for heap %p\n",
350 subheap->commitSize - size,
351 ((char *)subheap + size),
352 subheap->heap );
353 return FALSE;
354 }
355
356 subheap->commitSize -= decommitsize;
357 return TRUE;
358 }
359
360
361 /***********************************************************************
362 * HEAP_CreateFreeBlock
363 *
364 * Create a free block at a specified address. 'size' is the size of the
365 * whole block, including the new arena.
366 */
367 static VOID HEAP_CreateFreeBlock( SUBHEAP *subheap, PVOID ptr, ULONG size )
368 {
369 ARENA_FREE *pFree;
370 BOOLEAN last;
371
372 /* Create a free arena */
373
374 pFree = (ARENA_FREE *)ptr;
375 pFree->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
376 pFree->magic = ARENA_FREE_MAGIC;
377
378 /* If debugging, erase the freed block content */
379
380 if (TRACE_ON(heap))
381 {
382 char *pEnd = (char *)ptr + size;
383 if (pEnd > (char *)subheap + subheap->commitSize)
384 pEnd = (char *)subheap + subheap->commitSize;
385 if (pEnd > (char *)(pFree + 1))
386 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
387 }
388
389 /* Check if the next block is also free */
390
391 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
392 (*(PULONG)((char *)ptr + size) & ARENA_FLAG_FREE))
393 {
394 /* Remove the next arena from the free list */
395 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
396 pNext->next->prev = pNext->prev;
397 pNext->prev->next = pNext->next;
398 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
399 if (TRACE_ON(heap))
400 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
401 }
402
403 /* Set the next block PREV_FREE flag and pointer */
404
405 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
406 if (!last)
407 {
408 PULONG pNext = (PULONG)((char *)ptr + size);
409 *pNext |= ARENA_FLAG_PREV_FREE;
410 *(ARENA_FREE **)(pNext - 1) = pFree;
411 }
412
413 /* Last, insert the new block into the free list */
414
415 pFree->size = size - sizeof(*pFree);
416 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
417 }
418
419
420 /***********************************************************************
421 * HEAP_MakeInUseBlockFree
422 *
423 * Turn an in-use block into a free block. Can also decommit the end of
424 * the heap, and possibly even free the sub-heap altogether.
425 */
426 static VOID HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
427 ULONG flags)
428 {
429 ARENA_FREE *pFree;
430 ULONG size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
431 ULONG dummySize = 0;
432
433 /* Check if we can merge with previous block */
434
435 if (pArena->size & ARENA_FLAG_PREV_FREE)
436 {
437 pFree = *((ARENA_FREE **)pArena - 1);
438 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
439 /* Remove it from the free list */
440 pFree->next->prev = pFree->prev;
441 pFree->prev->next = pFree->next;
442 }
443 else
444 pFree = (ARENA_FREE *)pArena;
445
446 /* Create a free block */
447
448 HEAP_CreateFreeBlock( subheap, pFree, size );
449 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
450 if ((char *)pFree + size < (char *)subheap + subheap->size)
451 return; /* Not the last block, so nothing more to do */
452
453 /* Free the whole sub-heap if it's empty and not the original one */
454
455 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
456 (subheap != &subheap->heap->subheap))
457 {
458 SUBHEAP *pPrev = &subheap->heap->subheap;
459 /* Remove the free block from the list */
460 pFree->next->prev = pFree->prev;
461 pFree->prev->next = pFree->next;
462 /* Remove the subheap from the list */
463 while (pPrev && (pPrev->next != subheap))
464 pPrev = pPrev->next;
465 if (pPrev)
466 pPrev->next = subheap->next;
467 /* Free the memory */
468 subheap->magic = 0;
469 ZwFreeVirtualMemory(NtCurrentProcess(),
470 (PVOID*)&subheap,
471 &dummySize,
472 MEM_RELEASE);
473 return;
474 }
475
476 /* Decommit the end of the heap */
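/* (not performed in this revision; a sketch of the missing step, using the
 * HEAP_Decommit helper defined above, would be
 * HEAP_Decommit( subheap, pFree + 1, flags ); ) */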
477 }
478
479
480 /***********************************************************************
481 * HEAP_ShrinkBlock
482 *
483 * Shrink an in-use block.
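 * E.g. shrinking an arena with 0x100 data bytes to a 0x40-byte request splits
 * off the trailing 0xc0 bytes as a new free block; a tail smaller than
 * HEAP_MIN_BLOCK_SIZE is instead left attached to the in-use block.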
484 */
485 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, ULONG size)
486 {
487 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
488 {
489 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
490 (pArena->size & ARENA_SIZE_MASK) - size );
491 /* assign size plus previous arena flags */
492 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
493 }
494 else
495 {
496 /* Turn off PREV_FREE flag in next block */
497 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
498 if (pNext < (char *)subheap + subheap->size)
499 *(PULONG)pNext &= ~ARENA_FLAG_PREV_FREE;
500 }
501 }
502
503 /***********************************************************************
504 * HEAP_InitSubHeap
505 */
506 static BOOLEAN HEAP_InitSubHeap( HEAP *heap, PVOID address, ULONG flags,
507 ULONG commitSize, ULONG totalSize )
508 {
509 SUBHEAP *subheap = (SUBHEAP *)address;
510 FREE_LIST_ENTRY *pEntry;
511 int i;
512 NTSTATUS Status;
513
514 /* Commit memory */
515 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
516 &address,
517 0,
518 (PULONG)&commitSize,
519 MEM_COMMIT,
520 PAGE_EXECUTE_READWRITE);
521 if (!NT_SUCCESS(Status))
522 {
523 DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
524 commitSize, address);
525 return FALSE;
526 }
527
528 /* Fill the sub-heap structure */
529
530 subheap = (PSUBHEAP)address;
531 subheap->heap = heap;
532 subheap->size = totalSize;
533 subheap->commitSize = commitSize;
534 subheap->magic = SUBHEAP_MAGIC;
535
536 if ( subheap != (SUBHEAP *)heap )
537 {
538 /* If this is a secondary subheap, insert it into the list */
539
540 subheap->headerSize = sizeof(SUBHEAP);
541 subheap->next = heap->subheap.next;
542 heap->subheap.next = subheap;
543 }
544 else
545 {
546 /* If this is a primary subheap, initialize main heap */
547
548 subheap->headerSize = sizeof(HEAP);
549 subheap->next = NULL;
550 heap->next = NULL;
551 heap->flags = flags;
552 heap->magic = HEAP_MAGIC;
553
554 /* Build the free lists */
555
556 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
557 {
558 pEntry->size = HEAP_freeListSizes[i];
559 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
560 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
561 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
562 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
563 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
564 pEntry->arena.threadId = 0;
565 pEntry->arena.magic = ARENA_FREE_MAGIC;
566 }
567
568 /* Initialize critical section */
569
570 RtlInitializeCriticalSection( &heap->critSection );
571 }
572
573 /* Create the first free block */
574
575 HEAP_CreateFreeBlock( subheap, (PUCHAR)subheap + subheap->headerSize,
576 subheap->size - subheap->headerSize );
577
578 return TRUE;
579 }
580
581 /***********************************************************************
582 * HEAP_CreateSubHeap
583 *
584 * Create a sub-heap of the given size.
585 * If heap == NULL, creates a main heap.
586 */
587 static PSUBHEAP
588 HEAP_CreateSubHeap(PVOID BaseAddress,
589 HEAP *heap, ULONG flags,
590 ULONG commitSize, ULONG totalSize )
591 {
592 PVOID address;
593 NTSTATUS Status;
594
595 /* Round up the sizes to a 64K boundary */
596
597 totalSize = (totalSize + 0xffff) & 0xffff0000;
598 commitSize = (commitSize + 0xffff) & 0xffff0000;
599 if (!commitSize)
600 commitSize = 0x10000;
601 if (totalSize < commitSize)
602 totalSize = commitSize;
603
604 /* Allocate the memory block */
605 address = BaseAddress;
606 if (!address)
607 {
608 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
609 &address,
610 0,
611 (PULONG)&totalSize,
612 MEM_RESERVE | MEM_COMMIT,
613 PAGE_EXECUTE_READWRITE);
614 if (!NT_SUCCESS(Status))
615 {
616 DPRINT("Could not VirtualAlloc %08lx bytes\n",
617 totalSize );
618 return NULL;
619 }
620 }
621
622 /* Initialize subheap */
623
624 if (!HEAP_InitSubHeap( heap? heap : (HEAP *)address,
625 address, flags, commitSize, totalSize ))
626 {
627 if (!BaseAddress)
628 {
629 ULONG dummySize = 0;
630 ZwFreeVirtualMemory(NtCurrentProcess(),
631 &address,
632 &dummySize,
633 MEM_RELEASE);
634 }
635 return NULL;
636 }
637
638 return (SUBHEAP *)address;
639 }
640
641
642 /***********************************************************************
643 * HEAP_FindFreeBlock
644 *
645 * Find a free block at least as large as the requested size, and make sure
646 * the requested size is committed.
647 */
648 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, ULONG size,
649 SUBHEAP **ppSubHeap )
650 {
651 SUBHEAP *subheap;
652 ARENA_FREE *pArena;
653 FREE_LIST_ENTRY *pEntry = heap->freeList;
654
655 /* Find a suitable free list, and in it find a block large enough */
656
657 while (pEntry->size < size)
658 pEntry++;
659 pArena = pEntry->arena.next;
660 while (pArena != &heap->freeList[0].arena)
661 {
662 ULONG arena_size = (pArena->size & ARENA_SIZE_MASK) +
663 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
664 if (arena_size >= size)
665 {
666 subheap = HEAP_FindSubHeap( heap, pArena );
667 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
668 + size + HEAP_MIN_BLOCK_SIZE,
669 heap->flags))
670 return NULL;
671 *ppSubHeap = subheap;
672 return pArena;
673 }
674
675 pArena = pArena->next;
676 }
677
678 /* If no block was found, attempt to grow the heap */
679
680 if (!(heap->flags & HEAP_GROWABLE))
681 {
682 DPRINT("Not enough space in heap %p for %08lx bytes\n",
683 heap, size );
684 return NULL;
685 }
686 /* Make sure the committed size is large enough to hold one more trailing
687 * free arena as well: that is, the sub-heap header, a first free arena
688 * which will eventually become the in-use block, and HEAP_MIN_BLOCK_SIZE
689 * for a second free arena that may receive all the remaining free space
690 * in HEAP_ShrinkBlock() */
691 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
692 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
693 max( HEAP_DEF_SIZE, size ) )))
694 return NULL;
695
696 DPRINT("created new sub-heap %p of %08lx bytes for heap %p\n",
697 subheap, size, heap );
698
699 *ppSubHeap = subheap;
700 return (ARENA_FREE *)(subheap + 1);
701 }
702
703
704 /***********************************************************************
705 * HEAP_IsValidArenaPtr
706 *
707 * Check that the pointer is inside the range possible for arenas.
708 */
709 static BOOLEAN HEAP_IsValidArenaPtr( HEAP *heap, PVOID ptr )
710 {
711 int i;
712 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
713 if (!subheap)
714 return FALSE;
715 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
716 return TRUE;
717 if (subheap != &heap->subheap)
718 return FALSE;
719 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
720 if (ptr == (PVOID)&heap->freeList[i].arena)
721 return TRUE;
722 return FALSE;
723 }
724
725
726 /***********************************************************************
727 * HEAP_ValidateFreeArena
728 */
729 static BOOLEAN HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
730 {
731 char *heapEnd = (char *)subheap + subheap->size;
732
733 /* Check magic number */
734 if (pArena->magic != ARENA_FREE_MAGIC)
735 {
736 DPRINT("Heap %p: invalid free arena magic for %p\n",
737 subheap->heap, pArena);
738 return FALSE;
739 }
740 /* Check size flags */
741 if (!(pArena->size & ARENA_FLAG_FREE) ||
742 (pArena->size & ARENA_FLAG_PREV_FREE))
743 {
744 DPRINT("Heap %p: bad flags %lx for free arena %p\n",
745 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
return FALSE;
746 }
747 /* Check arena size */
748 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
749 {
750 DPRINT("Heap %p: bad size %08lx for free arena %p\n",
751 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena );
752 return FALSE;
753 }
754 /* Check that next pointer is valid */
755 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
756 {
757 DPRINT("Heap %p: bad next ptr %p for arena %p\n",
758 subheap->heap, pArena->next, pArena);
759 return FALSE;
760 }
761 /* Check that next arena is free */
762 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
763 (pArena->next->magic != ARENA_FREE_MAGIC))
764 {
765 DPRINT("Heap %p: next arena %p invalid for %p\n",
766 subheap->heap, pArena->next, pArena);
767 return FALSE;
768 }
769 /* Check that prev pointer is valid */
770 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
771 {
772 DPRINT("Heap %p: bad prev ptr %p for arena %p\n",
773 subheap->heap, pArena->prev, pArena);
774 return FALSE;
775 }
776 /* Check that prev arena is free */
777 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
778 (pArena->prev->magic != ARENA_FREE_MAGIC))
779 {
780 DPRINT("Heap %p: prev arena %p invalid for %p\n",
781 subheap->heap, pArena->prev, pArena);
782 return FALSE;
783 }
784 /* Check that next block has PREV_FREE flag */
785 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
786 {
787 if (!(*(PULONG)((char *)(pArena + 1) +
788 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
789 {
790 DPRINT("Heap %p: free arena %p next block has no PREV_FREE flag\n",
791 subheap->heap, pArena);
792 return FALSE;
793 }
794 /* Check next block back pointer */
795 if (*((ARENA_FREE **)((char *)(pArena + 1) +
796 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
797 {
798 DPRINT("Heap %p: arena %p has wrong back ptr %p\n",
799 subheap->heap, pArena,
800 *((PULONG)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
801 return FALSE;
802 }
803 }
804 return TRUE;
805 }
806
807
808 /***********************************************************************
809 * HEAP_ValidateInUseArena
810 */
811 static BOOLEAN HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOLEAN quiet )
812 {
813 char *heapEnd = (char *)subheap + subheap->size;
814
815 /* Check magic number */
816 if (pArena->magic != ARENA_INUSE_MAGIC)
817 {
818 if (quiet == NOISY)
819 {
820 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
821 subheap->heap, pArena);
822 if (TRACE_ON(heap))
823 HEAP_Dump( subheap->heap );
824 }
825 else if (WARN_ON(heap))
826 {
827 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
828 subheap->heap, pArena);
829 if (TRACE_ON(heap))
830 HEAP_Dump( subheap->heap );
831 }
832 return FALSE;
833 }
834 /* Check size flags */
835 if (pArena->size & ARENA_FLAG_FREE)
836 {
837 DPRINT("Heap %p: bad flags %lx for in-use arena %p\n",
838 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
839 return FALSE;
840 }
841 /* Check arena size */
842 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
843 {
844 DPRINT("Heap %p: bad size %08lx for in-use arena %p\n",
845 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena);
846 return FALSE;
847 }
848 /* Check next arena PREV_FREE flag */
849 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
850 (*(PULONG)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
851 {
852 DPRINT("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
853 subheap->heap, pArena);
854 return FALSE;
855 }
856 /* Check prev free arena */
857 if (pArena->size & ARENA_FLAG_PREV_FREE)
858 {
859 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
860 /* Check prev pointer */
861 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
862 {
863 DPRINT("Heap %p: bad back ptr %p for arena %p\n",
864 subheap->heap, pPrev, pArena );
865 return FALSE;
866 }
867 /* Check that prev arena is free */
868 if (!(pPrev->size & ARENA_FLAG_FREE) ||
869 (pPrev->magic != ARENA_FREE_MAGIC))
870 {
871 DPRINT("Heap %p: prev arena %p invalid for in-use %p\n",
872 subheap->heap, pPrev, pArena);
873 return FALSE;
874 }
875 /* Check that prev arena is really the previous block */
876 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
877 {
878 DPRINT("Heap %p: prev arena %p is not prev for in-use %p\n",
879 subheap->heap, pPrev, pArena );
880 return FALSE;
881 }
882 }
883 return TRUE;
884 }
885
886
887 /***********************************************************************
888 * HEAP_IsInsideHeap
889 * Checks whether the pointer points to a block inside a given heap.
890 *
891 * NOTES
892 * Should this return BOOL32?
893 *
894 * RETURNS
895 * !0: Success
896 * 0: Failure
897 */
898 int HEAP_IsInsideHeap(
899 HANDLE heap, /* [in] Heap */
900 ULONG flags, /* [in] Flags */
901 PVOID ptr /* [in] Pointer */
902 )
903 {
904 HEAP *heapPtr = HEAP_GetPtr( heap );
905 SUBHEAP *subheap;
906 int ret;
907
908 /* Validate the parameters */
909
910 if (!heapPtr)
911 return 0;
912 flags |= heapPtr->flags;
913 if (!(flags & HEAP_NO_SERIALIZE))
914 RtlEnterCriticalSection( &heapPtr->critSection );
915 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
916 (((char *)ptr >= (char *)subheap + subheap->headerSize
917 + sizeof(ARENA_INUSE))));
918 if (!(flags & HEAP_NO_SERIALIZE))
919 RtlLeaveCriticalSection( &heapPtr->critSection );
920 return ret;
921 }
922
923
924 void DumpStackFrames ( PULONG Frame, ULONG FrameCount )
925 {
926 ULONG i=0;
927
928 DbgPrint("Frames: ");
929 if ( !Frame )
930 {
931 #if defined __GNUC__
932 __asm__("mov %%ebp, %%ebx" : "=b" (Frame) : );
933 #elif defined(_MSC_VER)
934 __asm mov [Frame], ebp
935 #endif
936 Frame = (PULONG)Frame[0]; // step out of DumpStackFrames
937 }
938 while ( Frame != 0 && (ULONG)Frame != 0xDEADBEEF && (ULONG)Frame != 0xcdcdcdcd && (ULONG)Frame != 0xcccccccc && i++ < FrameCount )
939 {
940 DbgPrint("<%X>", (PVOID)Frame[1]);
941 if (Frame[1] == 0xdeadbeef)
942 break;
943 Frame = (PULONG)Frame[0];
944 DbgPrint(" ");
945 }
946 DbgPrint("\n");
947 }
948
949 /***********************************************************************
950 * HEAP_IsRealArena [Internal]
951 * Validates that a block is a valid arena.
952 *
953 * RETURNS
954 * TRUE: Success
955 * FALSE: Failure
956 */
957 static BOOLEAN HEAP_IsRealArena(
958 HANDLE heap, /* [in] Handle to the heap */
959 ULONG flags, /* [in] Bit flags that control access during operation */
960 PVOID block, /* [in] Optional pointer to memory block to validate */
961 BOOLEAN quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
962 * does not complain */
963 )
964 {
965 SUBHEAP *subheap;
966 HEAP *heapPtr = (HEAP *)(heap);
967 BOOLEAN ret = TRUE;
968
969 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
970 {
971 DPRINT("Invalid heap %08x!\n", heap );
972 return FALSE;
973 }
974
975 flags &= HEAP_NO_SERIALIZE;
976 flags |= heapPtr->flags;
977 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
978 if (!(flags & HEAP_NO_SERIALIZE))
979 RtlEnterCriticalSection( &heapPtr->critSection );
980
981 if (block)
982 {
983 /* Only check this single memory block */
984
985 /* The following code is really HEAP_IsInsideHeap *
986 * with serialization already done. */
987 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
988 ((char *)block < (char *)subheap + subheap->headerSize
989 + sizeof(ARENA_INUSE)))
990 {
991 if (quiet == NOISY)
992 {
993 DPRINT("Heap %p: block %p is not inside heap\n",
994 heap, block );
995 DumpStackFrames(NULL,10);
996 }
997 else if (WARN_ON(heap))
998 {
999 DPRINT1("Heap %p: block %p is not inside heap\n",
1000 heap, block );
1001 DumpStackFrames(NULL,10);
1002 }
1003 ret = FALSE;
1004 }
1005 else
1006 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1007
1008 if (!(flags & HEAP_NO_SERIALIZE))
1009 RtlLeaveCriticalSection( &heapPtr->critSection );
1010 return ret;
1011 }
1012
1013 subheap = &heapPtr->subheap;
1014 while (subheap && ret)
1015 {
1016 char *ptr = (char *)subheap + subheap->headerSize;
1017 while (ptr < (char *)subheap + subheap->size)
1018 {
1019 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1020 {
1021 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1022 {
1023 ret = FALSE;
1024 break;
1025 }
1026 ptr += sizeof(ARENA_FREE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1027 }
1028 else
1029 {
1030 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1031 {
1032 ret = FALSE;
1033 break;
1034 }
1035 ptr += sizeof(ARENA_INUSE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1036 }
1037 }
1038 subheap = subheap->next;
1039 }
1040
1041 if (!(flags & HEAP_NO_SERIALIZE))
1042 RtlLeaveCriticalSection( &heapPtr->critSection );
1043 return ret;
1044 }
1045
1046
1047 /***********************************************************************
1048 * HeapCreate (KERNEL32.336)
1049 * RETURNS
1050 * Handle of heap: Success
1051 * NULL: Failure
1052 *
1053 * @implemented
1054 */
1055 HANDLE STDCALL
1056 RtlCreateHeap(ULONG flags,
1057 PVOID BaseAddress,
1058 ULONG maxSize,
1059 ULONG initialSize,
1060 PVOID Unknown,
1061 PRTL_HEAP_DEFINITION Definition)
1062 {
1063 SUBHEAP *subheap;
1064 HEAP *heapPtr;
1065
1066 /* Allocate the heap block */
1067
1068 if (!maxSize)
1069 {
1070 maxSize = HEAP_DEF_SIZE;
1071 flags |= HEAP_GROWABLE;
1072 }
1073 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize, maxSize )))
1074 {
1075 return 0;
1076 }
1077
1078 /* link it into the per-process heap list */
1079 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1080
1081 heapPtr = subheap->heap;
1082 heapPtr->next = (HEAP*)NtCurrentPeb()->ProcessHeaps;
1083 NtCurrentPeb()->ProcessHeaps = (HANDLE)heapPtr;
1084 NtCurrentPeb()->NumberOfHeaps++;
1085
1086 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1087
1088 return (HANDLE)subheap;
1089 }
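/* Caller-side usage, for illustration only (a sketch, not part of this
 * module; error handling omitted):
 *
 *   HANDLE h = RtlCreateHeap( HEAP_GROWABLE, NULL, 0, 0, NULL, NULL );
 *   PVOID p = RtlAllocateHeap( h, HEAP_ZERO_MEMORY, 0x100 );
 *   ...
 *   RtlFreeHeap( h, 0, p );
 *   RtlDestroyHeap( h );
 */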
1090
1091 /***********************************************************************
1092 * HeapDestroy (KERNEL32.337)
1093 * RETURNS
1094 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1095 * Failure: The heap handle, if heap is the process heap
1096 *
1097 * @implemented
1102 */
1103 HANDLE STDCALL
1104 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1105 {
1106 HEAP *heapPtr = HEAP_GetPtr( heap );
1107 SUBHEAP *subheap;
1108 ULONG flags;
1109 HEAP **pptr;
1110
1111 DPRINT("%08x\n", heap );
1112 if (!heapPtr)
1113 return heap;
1114
1115 if (heap == NtCurrentPeb()->ProcessHeap)
1116 return heap; /* cannot delete the main process heap */
1117
1118 /* remove it from the per-process list */
1119 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1120
1121 pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps;
1122 while (*pptr && *pptr != heapPtr) pptr = &(*pptr)->next;
1123 if (*pptr) *pptr = (*pptr)->next;
1124 NtCurrentPeb()->NumberOfHeaps--;
1125
1126 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1127
1128 RtlDeleteCriticalSection( &heapPtr->critSection );
1129 subheap = &heapPtr->subheap;
1130 // We must save the flags. The first subheap is located right after
1131 // the heap structure, so releasing the first subheap also releases
1132 // the heap structure itself.
1133 flags = heapPtr->flags;
1134 while (subheap)
1135 {
1136 SUBHEAP *next = subheap->next;
1137 ULONG dummySize = 0;
1138 ZwFreeVirtualMemory(NtCurrentProcess(),
1139 (PVOID*)&subheap,
1140 &dummySize,
1141 MEM_RELEASE);
1142 subheap = next;
1143 }
1144 return (HANDLE)NULL;
1145 }
1146
1147
1148 /***********************************************************************
1149 * HeapAlloc (KERNEL32.334)
1150 * RETURNS
1151 * Pointer to allocated memory block
1152 * NULL: Failure
1153 * 0x7d030f60--invalid flags in RtlHeapAllocate
1154 * @implemented
1155 */
1156 PVOID STDCALL
1157 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1158 ULONG flags, /* [in] Heap allocation control flags */
1159 ULONG size) /* [in] Number of bytes to allocate */
1160 {
1161 ARENA_FREE *pArena;
1162 ARENA_INUSE *pInUse;
1163 SUBHEAP *subheap;
1164 HEAP *heapPtr = HEAP_GetPtr( heap );
1165
1166 /* Validate the parameters */
1167
1168 if (!heapPtr)
1169 {
1170 if (flags & HEAP_GENERATE_EXCEPTIONS)
1171 RtlRaiseStatus( STATUS_NO_MEMORY );
1172 return NULL;
1173 }
1174 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1175 flags |= heapPtr->flags;
1176 if (!(flags & HEAP_NO_SERIALIZE))
1177 RtlEnterCriticalSection( &heapPtr->critSection );
1178 size = (size + 7) & ~7;
1179 if (size < HEAP_MIN_BLOCK_SIZE)
1180 size = HEAP_MIN_BLOCK_SIZE;
1181
1182 /* Locate a suitable free block */
1183
1184 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1185 {
1186 DPRINT("(%08x,%08lx,%08lx): returning NULL\n",
1187 heap, flags, size );
1188 if (!(flags & HEAP_NO_SERIALIZE))
1189 RtlLeaveCriticalSection( &heapPtr->critSection );
1190 if (flags & HEAP_GENERATE_EXCEPTIONS)
1191 RtlRaiseStatus( STATUS_NO_MEMORY );
1192 return NULL;
1193 }
1194
1195 /* Remove the arena from the free list */
1196
1197 pArena->next->prev = pArena->prev;
1198 pArena->prev->next = pArena->next;
1199
1200 /* Build the in-use arena */
1201
1202 pInUse = (ARENA_INUSE *)pArena;
1203 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1204 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1205 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1206 pInUse->magic = ARENA_INUSE_MAGIC;
1207
1208 /* Shrink the block */
1209
1210 HEAP_ShrinkBlock( subheap, pInUse, size );
1211
1212 if (flags & HEAP_ZERO_MEMORY)
1213 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1214 else if (TRACE_ON(heap))
1215 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1216
1217 if (!(flags & HEAP_NO_SERIALIZE))
1218 RtlLeaveCriticalSection( &heapPtr->critSection );
1219
1220 DPRINT("(%08x,%08lx,%08lx): returning %p\n",
1221 heap, flags, size, (PVOID)(pInUse + 1) );
1222 return (PVOID)(pInUse + 1);
1223 }
1224
1225
1226 /***********************************************************************
1227 * HeapFree (KERNEL32.338)
1228 * RETURNS
1229 * TRUE: Success
1230 * FALSE: Failure
1231 *
1232 * @implemented
1233 */
1234 BOOLEAN STDCALL RtlFreeHeap(
1235 HANDLE heap, /* [in] Handle of heap */
1236 ULONG flags, /* [in] Heap freeing flags */
1237 PVOID ptr /* [in] Address of memory to free */
1238 )
1239 {
1240 ARENA_INUSE *pInUse;
1241 SUBHEAP *subheap;
1242 HEAP *heapPtr = HEAP_GetPtr( heap );
1243
1244 /* Validate the parameters */
1245
1246 if (!heapPtr)
1247 return FALSE;
1248 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1249 {
1250 DPRINT("(%08x,%08lx,%p): asked to free NULL\n",
1251 heap, flags, ptr );
1252 return TRUE;
1253 }
1254
1255 flags &= HEAP_NO_SERIALIZE;
1256 flags |= heapPtr->flags;
1257 if (!(flags & HEAP_NO_SERIALIZE))
1258 RtlEnterCriticalSection( &heapPtr->critSection );
1259 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1260 {
1261 if (!(flags & HEAP_NO_SERIALIZE))
1262 RtlLeaveCriticalSection( &heapPtr->critSection );
1263 DPRINT("(%08x,%08lx,%p): returning FALSE\n",
1264 heap, flags, ptr );
1265 return FALSE;
1266 }
1267
1268 /* Turn the block into a free block */
1269
1270 pInUse = (ARENA_INUSE *)ptr - 1;
1271 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1272 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1273
1274 if (!(flags & HEAP_NO_SERIALIZE))
1275 RtlLeaveCriticalSection( &heapPtr->critSection );
1276
1277 DPRINT("(%08x,%08lx,%p): returning TRUE\n",
1278 heap, flags, ptr );
1279 return TRUE;
1280 }
1281
1282
1283 /***********************************************************************
1284 * RtlReAllocateHeap
1285 * PARAMS
1286 * Heap [in] Handle of heap block
1287 * Flags [in] Heap reallocation flags
1288 * Ptr, [in] Address of memory to reallocate
1289 * Size [in] Number of bytes to reallocate
1290 *
1291 * RETURNS
1292 * Pointer to reallocated memory block
1293 * NULL: Failure
1294 * 0x7d030f60--invalid flags in RtlHeapAllocate
1295 * @implemented
1296 */
1297 PVOID STDCALL RtlReAllocateHeap(
1298 HANDLE Heap,
1299 ULONG Flags,
1300 PVOID Ptr,
1301 ULONG Size
1302 )
1303 {
1304 ARENA_INUSE *pArena;
1305 ULONG oldSize;
1306 HEAP *heapPtr;
1307 SUBHEAP *subheap;
1308
1309 if (!Ptr)
1310 return NULL;
1311 if (!(heapPtr = HEAP_GetPtr( Heap )))
1312 return NULL;
1313
1314 /* Validate the parameters */
1315
1316 Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1317 HEAP_REALLOC_IN_PLACE_ONLY;
1318 Flags |= heapPtr->flags;
1319 Size = (Size + 7) & ~7;
1320 if (Size < HEAP_MIN_BLOCK_SIZE)
1321 Size = HEAP_MIN_BLOCK_SIZE;
1322
1323 if (!(Flags & HEAP_NO_SERIALIZE))
1324 RtlEnterCriticalSection( &heapPtr->critSection );
1325 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1326 {
1327 if (!(Flags & HEAP_NO_SERIALIZE))
1328 RtlLeaveCriticalSection( &heapPtr->critSection );
1329 DPRINT("(%08x,%08lx,%p,%08lx): returning NULL\n",
1330 Heap, Flags, Ptr, Size );
1331 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1332 RtlRaiseStatus( STATUS_NO_MEMORY );
1333 return NULL;
1334 }
1335
1336 /* Check if we need to grow the block */
1337
1338 pArena = (ARENA_INUSE *)Ptr - 1;
1339 pArena->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1340
1341 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1342 oldSize = (pArena->size & ARENA_SIZE_MASK);
1343 if (Size > oldSize)
1344 {
1345 char *pNext = (char *)(pArena + 1) + oldSize;
1346 if ((pNext < (char *)subheap + subheap->size) &&
1347 (*(PULONG)pNext & ARENA_FLAG_FREE) &&
1348 (oldSize + (*(PULONG)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= Size))
1349 {
1350 /* The next block is free and large enough */
1351 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1352 pFree->next->prev = pFree->prev;
1353 pFree->prev->next = pFree->next;
1354 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1355 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1356 + Size + HEAP_MIN_BLOCK_SIZE,
1357 heapPtr->flags))
1358 {
1359 if (!(Flags & HEAP_NO_SERIALIZE))
1360 RtlLeaveCriticalSection( &heapPtr->critSection );
1361 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1362 RtlRaiseStatus( STATUS_NO_MEMORY );
1363 return NULL;
1364 }
1365 HEAP_ShrinkBlock( subheap, pArena, Size );
1366 }
1367 else /* Do it the hard way */
1368 {
1369 ARENA_FREE *pNew;
1370 ARENA_INUSE *pInUse;
1371 SUBHEAP *newsubheap;
1372
1373 if ((Flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1374 !(pNew = HEAP_FindFreeBlock( heapPtr, Size, &newsubheap )))
1375 {
1376 if (!(Flags & HEAP_NO_SERIALIZE))
1377 RtlLeaveCriticalSection( &heapPtr->critSection );
1378 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1379 RtlRaiseStatus( STATUS_NO_MEMORY );
1380 return NULL;
1381 }
1382
1383 /* Build the in-use arena */
1384
1385 pNew->next->prev = pNew->prev;
1386 pNew->prev->next = pNew->next;
1387 pInUse = (ARENA_INUSE *)pNew;
1388 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1389 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1390 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1391 pInUse->magic = ARENA_INUSE_MAGIC;
1392 HEAP_ShrinkBlock( newsubheap, pInUse, Size );
1393 memcpy( pInUse + 1, pArena + 1, oldSize );
1394
1395 /* Free the previous block */
1396
1397 HEAP_MakeInUseBlockFree( subheap, pArena, Flags );
1398 subheap = newsubheap;
1399 pArena = pInUse;
1400 }
1401 }
1402 else
1403 HEAP_ShrinkBlock( subheap, pArena, Size ); /* Shrink the block */
1404
1405 /* Clear the extra bytes if needed */
1406
1407 if (Size > oldSize)
1408 {
1409 if (Flags & HEAP_ZERO_MEMORY)
1410 memset( (char *)(pArena + 1) + oldSize, 0,
1411 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1412 else if (TRACE_ON(heap))
1413 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1414 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1415 }
1416
1417 /* Return the new arena */
1418
1419 if (!(Flags & HEAP_NO_SERIALIZE))
1420 RtlLeaveCriticalSection( &heapPtr->critSection );
1421
1422 DPRINT("(%08x,%08lx,%p,%08lx): returning %p\n",
1423 Heap, Flags, Ptr, Size, (PVOID)(pArena + 1) );
1424 return (PVOID)(pArena + 1);
1425 }
1426
1427
1428 /***********************************************************************
1429 * RtlCompactHeap
1430 *
1431 * @unimplemented
1432 */
1433 ULONG STDCALL
1434 RtlCompactHeap(HANDLE Heap,
1435 ULONG Flags)
1436 {
1437 UNIMPLEMENTED;
1438 return 0;
1439 }
1440
1441
1442 /***********************************************************************
1443 * RtlLockHeap
1444 * Attempts to acquire the critical section object for a specified heap.
1445 *
1446 * PARAMS
1447 * Heap [in] Handle of heap to lock for exclusive access
1448 *
1449 * RETURNS
1450 * TRUE: Success
1451 * FALSE: Failure
1452 *
1453 * @implemented
1454 */
1455 BOOLEAN STDCALL
1456 RtlLockHeap(IN HANDLE Heap)
1457 {
1458 HEAP *heapPtr = HEAP_GetPtr( Heap );
1459 if (!heapPtr)
1460 return FALSE;
1461 RtlEnterCriticalSection( &heapPtr->critSection );
1462 return TRUE;
1463 }
1464
1465
1466 /***********************************************************************
1467 * RtlUnlockHeap
1468 * Releases ownership of the critical section object.
1469 *
1470 * PARAMS
1471 * Heap [in] Handle to the heap to unlock
1472 *
1473 * RETURNS
1474 * TRUE: Success
1475 * FALSE: Failure
1476 *
1477 * @implemented
1478 */
1479 BOOLEAN STDCALL
1480 RtlUnlockHeap(HANDLE Heap)
1481 {
1482 HEAP *heapPtr = HEAP_GetPtr( Heap );
1483 if (!heapPtr)
1484 return FALSE;
1485 RtlLeaveCriticalSection( &heapPtr->critSection );
1486 return TRUE;
1487 }
1488
1489
1490 /***********************************************************************
1491 * RtlSizeHeap
1492 * PARAMS
1493 * Heap [in] Handle of heap
1494 * Flags [in] Heap size control flags
1495 * Ptr [in] Address of memory to return size for
1496 *
1497 * RETURNS
1498 * Size in bytes of allocated memory
1499 * 0xffffffff: Failure
1500 *
1501 * @implemented
1502 */
1503 ULONG STDCALL
1504 RtlSizeHeap(
1505 HANDLE Heap,
1506 ULONG Flags,
1507 PVOID Ptr
1508 )
1509 {
1510 ULONG ret;
1511 HEAP *heapPtr = HEAP_GetPtr( Heap );
1512
1513 if (!heapPtr)
1514 return 0;
1515 Flags &= HEAP_NO_SERIALIZE;
1516 Flags |= heapPtr->flags;
1517 if (!(Flags & HEAP_NO_SERIALIZE))
1518 RtlEnterCriticalSection( &heapPtr->critSection );
1519 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1520 {
1521 ret = 0xffffffff;
1522 }
1523 else
1524 {
1525 ARENA_INUSE *pArena = (ARENA_INUSE *)Ptr - 1;
1526 ret = pArena->size & ARENA_SIZE_MASK;
1527 }
1528 if (!(Flags & HEAP_NO_SERIALIZE))
1529 RtlLeaveCriticalSection( &heapPtr->critSection );
1530
1531 DPRINT("(%08x,%08lx,%p): returning %08lx\n",
1532 Heap, Flags, Ptr, ret );
1533 return ret;
1534 }
1535
1536
1537 /***********************************************************************
1538 * RtlValidateHeap
1539 * Validates a specified heap.
1540 *
1541 * PARAMS
1542 * Heap [in] Handle to the heap
1543 * Flags [in] Bit flags that control access during operation
1544 * Block [in] Optional pointer to memory block to validate
1545 *
1546 * NOTES
1547 * Flags is ignored.
1548 *
1549 * RETURNS
1550 * TRUE: Success
1551 * FALSE: Failure
1552 *
1553 * @implemented
1554 */
1555 BOOLEAN STDCALL RtlValidateHeap(
1556 HANDLE Heap,
1557 ULONG Flags,
1558 PVOID Block
1559 )
1560 {
1561 HEAP *heapPtr = HEAP_GetPtr( Heap );
1562 if (!heapPtr)
1563 return FALSE;
1564 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1565 }
1566
1567
1568 /***********************************************************************
1569 * HeapWalk (KERNEL32.344)
1570 * Enumerates the memory blocks in a specified heap.
1571 * See HEAP_Dump() for info on heap structure.
1572 *
1573 * TODO
1574 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1575 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1576 *
1577 * RETURNS
1578 * TRUE: Success
1579 * FALSE: Failure
1580 */
1581 #if 0
1582 BOOLEAN STDCALL HeapWalk(
1583 HANDLE heap, /* [in] Handle to heap to enumerate */
1584 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1585 )
1586 {
1587 HEAP *heapPtr = HEAP_GetPtr(heap);
1588 SUBHEAP *sub, *currentheap = NULL;
1589 BOOLEAN ret = FALSE;
1590 char *ptr;
1591 int region_index = 0;
1592
1593 if (!heapPtr || !entry)
1594 {
1595 return FALSE;
1596 }
1597
1598 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1599 RtlEnterCriticalSection( &heapPtr->critSection );
1600
1601 /* set ptr to the next arena to be examined */
1602
1603 if (!entry->lpData) /* first call (init) ? */
1604 {
1605 DPRINT("begin walking of heap 0x%08x.\n", heap);
1606 /*HEAP_Dump(heapPtr);*/
1607 currentheap = &heapPtr->subheap;
1608 ptr = (char*)currentheap + currentheap->headerSize;
1609 }
1610 else
1611 {
1612 ptr = entry->lpData;
1613 sub = &heapPtr->subheap;
1614 while (sub)
1615 {
1616 if (((char *)ptr >= (char *)sub) &&
1617 ((char *)ptr < (char *)sub + sub->size))
1618 {
1619 currentheap = sub;
1620 break;
1621 }
1622 sub = sub->next;
1623 region_index++;
1624 }
1625 if (currentheap == NULL)
1626 {
1627 DPRINT("no matching subheap found, shouldn't happen !\n");
1628 goto HW_end;
1629 }
1630
1631 ptr += entry->cbData; /* point to next arena */
1632 if (ptr > (char *)currentheap + currentheap->size - 1)
1633 { /* proceed with next subheap */
1634 if (!(currentheap = currentheap->next))
1635 { /* successfully finished */
1636 DPRINT("end reached.\n");
1637 goto HW_end;
1638 }
1639 ptr = (char*)currentheap + currentheap->headerSize;
1640 }
1641 }
1642
1643 entry->wFlags = 0;
1644 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1645 {
1646 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1647
1648 /*DPRINT("free, magic: %04x\n", pArena->magic);*/
1649
1650 entry->lpData = pArena + 1;
1651 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1652 entry->cbOverhead = sizeof(ARENA_FREE);
1653 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1654 }
1655 else
1656 {
1657 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1658
1659 /*DPRINT("busy, magic: %04x\n", pArena->magic);*/
1660
1661 entry->lpData = pArena + 1;
1662 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1663 entry->cbOverhead = sizeof(ARENA_INUSE);
1664 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1665 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1666 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1667 }
1668
1669 entry->iRegionIndex = region_index;
1670
1671 /* first element of heap ? */
1672 if (ptr == (char *)currentheap + currentheap->headerSize)
1673 {
1674 entry->wFlags |= PROCESS_HEAP_REGION;
1675 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1676 entry->Foo.Region.dwUnCommittedSize =
1677 currentheap->size - currentheap->commitSize;
1678 entry->Foo.Region.lpFirstBlock = /* first valid block */
1679 (char *)currentheap + currentheap->headerSize;
1680 entry->Foo.Region.lpLastBlock = /* first invalid block */
1681 (char *)currentheap + currentheap->size;
1682 }
1683 ret = TRUE;
1684
1685 HW_end:
1686 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1687 RtlLeaveCriticalSection( &heapPtr->critSection );
1688
1689 return ret;
1690 }
1691 #endif
1692
1693
1694 VOID
1695 RtlInitializeHeapManager(VOID)
1696 {
1697 PPEB Peb;
1698
1699 Peb = NtCurrentPeb();
1700
1701 Peb->NumberOfHeaps = 0;
1702 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1703 Peb->ProcessHeaps = NULL;
1704
1705 RtlInitializeCriticalSection(&RtlpProcessHeapsListLock);
1706 }
1707
1708
1709 /*
1710 * @implemented
1711 */
1712 NTSTATUS STDCALL
1713 RtlEnumProcessHeaps(PHEAP_ENUMERATION_ROUTINE HeapEnumerationRoutine,
1714 PVOID lParam)
1715 {
1716 NTSTATUS Status = STATUS_SUCCESS;
1717 HEAP** pptr;
1718
1719 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1720
1721 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1722 {
1723 Status = HeapEnumerationRoutine(*pptr,lParam);
1724 if (!NT_SUCCESS(Status))
1725 break;
1726 }
1727
1728 RtlLeaveCriticalSection(&RtlpProcessHeapsListLock);
1729
1730 return Status;
1731 }
1732
1733
1734 /*
1735 * @implemented
1736 */
1737 ULONG STDCALL
1738 RtlGetProcessHeaps(ULONG HeapCount,
1739 HANDLE *HeapArray)
1740 {
1741 ULONG Result = 0;
1742 HEAP ** pptr;
1743
1744 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1745
1746 Result = NtCurrentPeb()->NumberOfHeaps;
1747
1748 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1749 {
1750 int i = 0;
1751 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1752 {
1753 HeapArray[i++] = *pptr;
1754 }
1755 }
1756
1757 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1758
1759 return Result;
1760 }
1761
1762
1763 /*
1764 * @implemented
1765 */
1766 BOOLEAN STDCALL
1767 RtlValidateProcessHeaps(VOID)
1768 {
1769 BOOLEAN Result = TRUE;
1770 HEAP ** pptr;
1771
1772 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1773
1774 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1775 {
1776 if (!RtlValidateHeap(*pptr, 0, NULL))
1777 {
1778 Result = FALSE;
1779 break;
1780 }
1781 }
1782
1783 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1784
1785 return Result;
1786 }
1787
1788
1789 /*
1790 * @unimplemented
1791 */
1792 BOOLEAN STDCALL
1793 RtlZeroHeap(
1794 IN PVOID HeapHandle,
1795 IN ULONG Flags
1796 )
1797 {
1798 UNIMPLEMENTED;
1799 return FALSE;
1800 }
1801
1802 /* EOF */