-link heaps together instead of using a fixed table past the PEB
[reactos.git] / reactos / lib / rtl / heap.c
1 /*
2 * Win32 heap functions
3 *
4 * Copyright 1996 Alexandre Julliard
5 * Copyright 1998 Ulrich Weigand
6 */
7
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 #include <string.h>
16 #include <ddk/ntddk.h>
17 #include <ntdll/rtl.h>
18 #include <ntos/heap.h>
19 #include <ntos/minmax.h>
20
21 #define NDEBUG
22 #include <debug.h>
23
24 #define WARN_ON(x) (1)
25
26 #ifdef NDEBUG
27 #define TRACE_ON(x) (0)
28 #else
29 #define TRACE_ON(x) (1)
30 #endif
31
32
33 static CRITICAL_SECTION RtlpProcessHeapsListLock;
34
35
36 typedef struct tagARENA_INUSE
37 {
38 ULONG size; /* Block size; must be the first field */
39 USHORT threadId; /* Allocating thread id */
40 USHORT magic; /* Magic number */
41 }
42 ARENA_INUSE;
43
44 typedef struct tagARENA_FREE
45 {
46 ULONG size; /* Block size; must be the first field */
47 USHORT threadId; /* Freeing thread id */
48 USHORT magic; /* Magic number */
49 struct tagARENA_FREE *next; /* Next free arena */
50 struct tagARENA_FREE *prev; /* Prev free arena */
51 }
52 ARENA_FREE;
53
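/* Illustrative sketch, not part of the original file: the relationship
 * between an arena header and the user pointer the allocator hands out.
 * RtlAllocateHeap returns (pInUse + 1) and RtlFreeHeap recovers the header
 * with ((ARENA_INUSE *)ptr - 1); the helper names are hypothetical and the
 * block is guarded out so it does not affect the build. */
#if 0
static PVOID HEAP_ExampleUserPointer( ARENA_INUSE *pInUse )
{
    /* User data starts immediately after the in-use arena header. */
    return (PVOID)(pInUse + 1);
}

static ARENA_INUSE *HEAP_ExampleHeaderFromPointer( PVOID ptr )
{
    /* Step back over the header to reach the arena bookkeeping. */
    return (ARENA_INUSE *)ptr - 1;
}
#endif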
54 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
55 #define ARENA_FLAG_PREV_FREE 0x00000002
56 #define ARENA_SIZE_MASK 0xfffffffc
57 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
58 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
59
60 #define ARENA_INUSE_FILLER 0x55
61 #define ARENA_FREE_FILLER 0xaa
62
63 #define QUIET 1 /* Suppress messages */
64 #define NOISY 0 /* Report all errors */
65
66 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
67
68 /* Max size of the blocks on the free lists */
69 static const ULONG HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
70 {
71 0x20, 0x80, 0x200, 0xffffffff
72 };
73
74 typedef struct
75 {
76 ULONG size;
77 ARENA_FREE arena;
78 }
79 FREE_LIST_ENTRY;
80
81 struct tagHEAP;
82
83 typedef struct tagSUBHEAP
84 {
85 ULONG size; /* Size of the whole sub-heap */
86 ULONG commitSize; /* Committed size of the sub-heap */
87 ULONG headerSize; /* Size of the heap header */
88 struct tagSUBHEAP *next; /* Next sub-heap */
89 struct tagHEAP *heap; /* Main heap structure */
90 ULONG magic; /* Magic number */
91 }
92 SUBHEAP, *PSUBHEAP;
93
94 #define SUBHEAP_MAGIC ((ULONG)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
95
96 typedef struct tagHEAP
97 {
98 SUBHEAP subheap; /* First sub-heap */
99 struct tagHEAP *next; /* Next heap for this process */
100 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
101 CRITICAL_SECTION critSection; /* Critical section for serialization */
102 ULONG flags; /* Heap flags */
103 ULONG magic; /* Magic number */
104 UCHAR filler[4]; /* Make multiple of 8 bytes */
105 }
106 HEAP, *PHEAP;
107
108 #define HEAP_MAGIC ((ULONG)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
109
110 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
111 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
112 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
113
114
115 static BOOLEAN HEAP_IsRealArena( HANDLE heap, ULONG flags, PVOID block, BOOLEAN quiet );
116
117
118 /***********************************************************************
119 * HEAP_Dump
120 */
121 VOID
122 HEAP_Dump(PHEAP heap)
123 {
124 int i;
125 SUBHEAP *subheap;
126 char *ptr;
127
128 DPRINT( "Heap: %p\n", heap );
129 DPRINT( "Next: %p Sub-heaps: %p",
130 heap->next, &heap->subheap );
131 subheap = &heap->subheap;
132 while (subheap->next)
133 {
134 DPRINT( " -> %p", subheap->next );
135 subheap = subheap->next;
136 }
137
138 DPRINT( "\nFree lists:\n Block Stat Size Id\n" );
139 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
140 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
141 &heap->freeList[i].arena,
142 heap->freeList[i].arena.size,
143 heap->freeList[i].arena.threadId,
144 heap->freeList[i].arena.prev,
145 heap->freeList[i].arena.next );
146
147 subheap = &heap->subheap;
148 while (subheap)
149 {
150 ULONG freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
151 DPRINT( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
152 subheap, subheap->size, subheap->commitSize );
153
154 DPRINT( "\n Block Stat Size Id\n" );
155 ptr = (char*)subheap + subheap->headerSize;
156 while (ptr < (char *)subheap + subheap->size)
157 {
158 if (*(PULONG)ptr & ARENA_FLAG_FREE)
159 {
160 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
161 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
162 pArena, pArena->size & ARENA_SIZE_MASK,
163 pArena->threadId, pArena->prev,
164 pArena->next);
165 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
166 arenaSize += sizeof(ARENA_FREE);
167 freeSize += pArena->size & ARENA_SIZE_MASK;
168 }
169 else if (*(PULONG)ptr & ARENA_FLAG_PREV_FREE)
170 {
171 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
172 DPRINT( "%p Used %08lx %04x back=%08lx\n",
173 pArena, pArena->size & ARENA_SIZE_MASK,
174 pArena->threadId, *((PULONG)pArena - 1));
175 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
176 arenaSize += sizeof(ARENA_INUSE);
177 usedSize += pArena->size & ARENA_SIZE_MASK;
178 }
179 else
180 {
181 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
182 DPRINT( "%p used %08lx %04x\n",
183 pArena, pArena->size & ARENA_SIZE_MASK,
184 pArena->threadId);
185 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
186 arenaSize += sizeof(ARENA_INUSE);
187 usedSize += pArena->size & ARENA_SIZE_MASK;
188 }
189 }
190 DPRINT( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
191 subheap->size, subheap->commitSize, freeSize, usedSize,
192 arenaSize, (arenaSize * 100) / subheap->size );
193 subheap = subheap->next;
194 }
195 }
196
197
198 /***********************************************************************
199 * HEAP_GetPtr
200 * RETURNS
201 * Pointer to the heap
202 * NULL: Failure
203 */
204 static PHEAP
205 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
206 {
207 HEAP *heapPtr = (HEAP *)heap;
208 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
209 {
210 DPRINT("Invalid heap %08x!\n", heap );
211 return NULL;
212 }
213 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
214 {
215 HEAP_Dump( heapPtr );
216 ASSERT( FALSE );
217 return NULL;
218 }
219 return heapPtr;
220 }
221
222
223 /***********************************************************************
224 * HEAP_InsertFreeBlock
225 *
226 * Insert a free block into the free list.
227 */
228 static VOID
229 HEAP_InsertFreeBlock(PHEAP heap,
230 ARENA_FREE *pArena,
231 BOOLEAN last)
232 {
233 FREE_LIST_ENTRY *pEntry = heap->freeList;
234 while (pEntry->size < pArena->size)
235 pEntry++;
236 if (last)
237 {
238 /* insert at end of free list, i.e. before next free list entry */
239 pEntry++;
240 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
241 {
242 pEntry = heap->freeList;
243 }
244 pArena->prev = pEntry->arena.prev;
245 pArena->prev->next = pArena;
246 pArena->next = &pEntry->arena;
247 pEntry->arena.prev = pArena;
248 }
249 else
250 {
251 /* insert at head of free list */
252 pArena->next = pEntry->arena.next;
253 pArena->next->prev = pArena;
254 pArena->prev = &pEntry->arena;
255 pEntry->arena.next = pArena;
256 }
257 pArena->size |= ARENA_FLAG_FREE;
258 }
259
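/* Illustrative sketch, not part of the original file: how a block size is
 * mapped to one of the HEAP_NB_FREE_LISTS bins, mirroring the scan above and
 * the one in HEAP_FindFreeBlock. The helper name is hypothetical and the
 * block is guarded out so it does not affect the build. */
#if 0
static FREE_LIST_ENTRY *HEAP_ExampleFindBin( HEAP *heap, ULONG blockSize )
{
    FREE_LIST_ENTRY *pEntry = heap->freeList;
    /* Sizes up to 0x20 land in bin 0, up to 0x80 in bin 1, up to 0x200 in
     * bin 2; the 0xffffffff entry catches everything larger. */
    while (pEntry->size < blockSize)
        pEntry++;
    return pEntry;
}
#endif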
260
261 /***********************************************************************
262 * HEAP_FindSubHeap
263 * Find the sub-heap containing a given address.
264 *
265 * RETURNS
266 * Pointer: Success
267 * NULL: Failure
268 */
269 static PSUBHEAP
270 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
271 PVOID ptr) /* [in] Address */
272 {
273 PSUBHEAP sub = &heap->subheap;
274 while (sub)
275 {
276 if (((char *)ptr >= (char *)sub) &&
277 ((char *)ptr < (char *)sub + sub->size))
278 return sub;
279 sub = sub->next;
280 }
281 return NULL;
282 }
283
284
285 /***********************************************************************
286 * HEAP_Commit
287 *
288 * Make sure the heap storage is committed up to (not including) ptr.
289 */
290 static inline BOOLEAN
291 HEAP_Commit(SUBHEAP *subheap,
292 PVOID ptr,
293 ULONG flags)
294 {
295 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
296 NTSTATUS Status;
297 PVOID address;
298 ULONG commitsize;
299
300 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
301 if (size > subheap->size)
302 size = subheap->size;
303 if (size <= subheap->commitSize)
304 return TRUE;
305
306 address = (PVOID)((char *)subheap + subheap->commitSize);
307 commitsize = size - subheap->commitSize;
308
309 if (!(flags & HEAP_NO_VALLOC))
310 {
311 Status = NtAllocateVirtualMemory(NtCurrentProcess(),
312 &address,
313 0,
314 &commitsize,
315 MEM_COMMIT,
316 PAGE_EXECUTE_READWRITE);
317 if (!NT_SUCCESS(Status))
318 {
319 DPRINT( "Could not commit %08lx bytes at %p for heap %p\n",
320 size - subheap->commitSize,
321 ((char *)subheap + subheap->commitSize),
322 subheap->heap );
323 return FALSE;
324 }
325 }
326 subheap->commitSize += commitsize;
327 return TRUE;
328 }
329
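/* Illustrative sketch, not part of the original file: the commit granularity
 * arithmetic used above. With COMMIT_MASK == 0xffff the heap commits whole
 * 64KB chunks, so an offset of 0x10001 is rounded up to 0x20000. The helper
 * name is hypothetical and the block is guarded out. */
#if 0
static ULONG HEAP_ExampleRoundToCommitGranularity( ULONG offset )
{
    /* e.g. 0x10001 -> 0x20000, while 0x20000 stays 0x20000 */
    return (offset + COMMIT_MASK) & ~COMMIT_MASK;
}
#endif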
330
331 /***********************************************************************
332 * HEAP_Decommit
333 *
334 * If possible, decommit the heap storage from (including) 'ptr'.
335 */
336 static inline BOOLEAN HEAP_Decommit( SUBHEAP *subheap, PVOID ptr, ULONG flags )
337 {
338 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
339 PVOID address;
340 ULONG decommitsize;
341 NTSTATUS Status;
342 /* round to next block and add one full block */
343 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
344 if (size >= subheap->commitSize)
345 return TRUE;
346
347 address = (PVOID)((char *)subheap + size);
348 decommitsize = subheap->commitSize - size;
349
350 if (!(flags & HEAP_NO_VALLOC))
351 {
352 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
353 &address,
354 &decommitsize,
355 MEM_DECOMMIT);
356 if (!NT_SUCCESS(Status))
358 {
359 DPRINT( "Could not decommit %08lx bytes at %p for heap %p\n",
360 subheap->commitSize - size,
361 ((char *)subheap + size),
362 subheap->heap );
363 return FALSE;
364 }
365 }
366 subheap->commitSize -= decommitsize;
367 return TRUE;
368 }
369
370
371 /***********************************************************************
372 * HEAP_CreateFreeBlock
373 *
374 * Create a free block at a specified address. 'size' is the size of the
375 * whole block, including the new arena.
376 */
377 static VOID HEAP_CreateFreeBlock( SUBHEAP *subheap, PVOID ptr, ULONG size )
378 {
379 ARENA_FREE *pFree;
380 BOOLEAN last;
381
382 /* Create a free arena */
383
384 pFree = (ARENA_FREE *)ptr;
385 pFree->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
386 pFree->magic = ARENA_FREE_MAGIC;
387
388 /* If debugging, erase the freed block content */
389
390 if (TRACE_ON(heap))
391 {
392 char *pEnd = (char *)ptr + size;
393 if (pEnd > (char *)subheap + subheap->commitSize)
394 pEnd = (char *)subheap + subheap->commitSize;
395 if (pEnd > (char *)(pFree + 1))
396 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
397 }
398
399 /* Check if next block is free also */
400
401 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
402 (*(PULONG)((char *)ptr + size) & ARENA_FLAG_FREE))
403 {
404 /* Remove the next arena from the free list */
405 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
406 pNext->next->prev = pNext->prev;
407 pNext->prev->next = pNext->next;
408 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
409 if (TRACE_ON(heap))
410 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
411 }
412
413 /* Set the next block PREV_FREE flag and pointer */
414
415 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
416 if (!last)
417 {
418 PULONG pNext = (PULONG)((char *)ptr + size);
419 *pNext |= ARENA_FLAG_PREV_FREE;
420 *(ARENA_FREE **)(pNext - 1) = pFree;
421 }
422
423 /* Last, insert the new block into the free list */
424
425 pFree->size = size - sizeof(*pFree);
426 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
427 }
428
429
430 /***********************************************************************
431 * HEAP_MakeInUseBlockFree
432 *
433 * Turn an in-use block into a free block. Can also decommit the end of
434 * the heap, and possibly even free the sub-heap altogether.
435 */
436 static VOID HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
437 ULONG flags)
438 {
439 ARENA_FREE *pFree;
440 ULONG size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
441
442 /* Check if we can merge with previous block */
443
444 if (pArena->size & ARENA_FLAG_PREV_FREE)
445 {
446 pFree = *((ARENA_FREE **)pArena - 1);
447 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
448 /* Remove it from the free list */
449 pFree->next->prev = pFree->prev;
450 pFree->prev->next = pFree->next;
451 }
452 else
453 pFree = (ARENA_FREE *)pArena;
454
455 /* Create a free block */
456
457 HEAP_CreateFreeBlock( subheap, pFree, size );
458 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
459 if ((char *)pFree + size < (char *)subheap + subheap->size)
460 return; /* Not the last block, so nothing more to do */
461
462 /* Free the whole sub-heap if it's empty and not the original one */
463
464 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
465 (subheap != &subheap->heap->subheap))
466 {
467 SUBHEAP *pPrev = &subheap->heap->subheap;
468 /* Remove the free block from the list */
469 pFree->next->prev = pFree->prev;
470 pFree->prev->next = pFree->next;
471 /* Remove the subheap from the list */
472 while (pPrev && (pPrev->next != subheap))
473 pPrev = pPrev->next;
474 if (pPrev)
475 pPrev->next = subheap->next;
476 /* Free the memory */
477 subheap->magic = 0;
478 if (!(flags & HEAP_NO_VALLOC))
479 {
480 ULONG dummySize = 0;
481 ZwFreeVirtualMemory(NtCurrentProcess(),
482 (PVOID*)&subheap,
483 &dummySize,
484 MEM_RELEASE);
485 }
486 return;
487 }
488
489 /* Decommit the end of the heap */
490 }
491
492
493 /***********************************************************************
494 * HEAP_ShrinkBlock
495 *
496 * Shrink an in-use block.
497 */
498 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, ULONG size)
499 {
500 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
501 {
502 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
503 (pArena->size & ARENA_SIZE_MASK) - size );
504 /* assign size plus previous arena flags */
505 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
506 }
507 else
508 {
509 /* Turn off PREV_FREE flag in next block */
510 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
511 if (pNext < (char *)subheap + subheap->size)
512 *(PULONG)pNext &= ~ARENA_FLAG_PREV_FREE;
513 }
514 }
515
516 /***********************************************************************
517 * HEAP_InitSubHeap
518 */
519 static BOOLEAN HEAP_InitSubHeap( HEAP *heap, PVOID address, ULONG flags,
520 ULONG commitSize, ULONG totalSize )
521 {
522 SUBHEAP *subheap = (SUBHEAP *)address;
523 FREE_LIST_ENTRY *pEntry;
524 int i;
525 NTSTATUS Status;
526
527 /* Commit memory */
528 if (!(flags & HEAP_NO_VALLOC))
529 {
530 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
531 &address,
532 0,
533 (PULONG)&commitSize,
534 MEM_COMMIT,
535 PAGE_EXECUTE_READWRITE);
536 if (!NT_SUCCESS(Status))
537 {
538 DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
539 commitSize, address);
540 return FALSE;
541 }
542 }
543
544 /* Fill the sub-heap structure */
545
546 subheap = (PSUBHEAP)address;
547 subheap->heap = heap;
548 subheap->size = totalSize;
549 subheap->commitSize = commitSize;
550 subheap->magic = SUBHEAP_MAGIC;
551
552 if ( subheap != (SUBHEAP *)heap )
553 {
554 /* If this is a secondary subheap, insert it into list */
555
556 subheap->headerSize = sizeof(SUBHEAP);
557 subheap->next = heap->subheap.next;
558 heap->subheap.next = subheap;
559 }
560 else
561 {
562 /* If this is a primary subheap, initialize main heap */
563
564 subheap->headerSize = sizeof(HEAP);
565 subheap->next = NULL;
566 heap->next = NULL;
567 heap->flags = flags;
568 heap->magic = HEAP_MAGIC;
569
570 /* Build the free lists */
571
572 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
573 {
574 pEntry->size = HEAP_freeListSizes[i];
575 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
576 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
577 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
578 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
579 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
580 pEntry->arena.threadId = 0;
581 pEntry->arena.magic = ARENA_FREE_MAGIC;
582 }
583
584 /* Initialize critical section */
585
586 RtlInitializeCriticalSection( &heap->critSection );
587 }
588
589 /* Create the first free block */
590
591 HEAP_CreateFreeBlock( subheap, (PUCHAR)subheap + subheap->headerSize,
592 subheap->size - subheap->headerSize );
593
594 return TRUE;
595 }
596
597 /***********************************************************************
598 * HEAP_CreateSubHeap
599 *
600 * Create a sub-heap of the given size.
601 * If heap == NULL, creates a main heap.
602 */
603 static PSUBHEAP
604 HEAP_CreateSubHeap(PVOID BaseAddress,
605 HEAP *heap, ULONG flags,
606 ULONG commitSize, ULONG totalSize )
607 {
608 PVOID address;
609 NTSTATUS Status;
610
611 /* Round up sizes to a 64KB boundary */
612
613 totalSize = (totalSize + 0xffff) & 0xffff0000;
614 commitSize = (commitSize + 0xffff) & 0xffff0000;
615 if (!commitSize)
616 commitSize = 0x10000;
617 if (totalSize < commitSize)
618 totalSize = commitSize;
619
620 /* Allocate the memory block */
621 address = BaseAddress;
622 if (!(flags & HEAP_NO_VALLOC))
623 {
624 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
625 &address,
626 0,
627 (PULONG)&totalSize,
628 MEM_RESERVE | MEM_COMMIT,
629 PAGE_EXECUTE_READWRITE);
630 if (!NT_SUCCESS(Status))
631 {
632 DPRINT("Could not VirtualAlloc %08lx bytes\n",
633 totalSize );
634 return NULL;
635 }
636 }
637
638 /* Initialize subheap */
639
640 if (!HEAP_InitSubHeap( heap? heap : (HEAP *)address,
641 address, flags, commitSize, totalSize ))
642 {
643 if (address && !(flags & HEAP_NO_VALLOC))
644 {
645 ULONG dummySize = 0;
646 ZwFreeVirtualMemory(NtCurrentProcess(),
647 &address,
648 &dummySize,
649 MEM_RELEASE);
650 }
651 return NULL;
652 }
653
654 return (SUBHEAP *)address;
655 }
656
657
658 /***********************************************************************
659 * HEAP_FindFreeBlock
660 *
661 * Find a free block at least as large as the requested size, and make sure
662 * the requested size is committed.
663 */
664 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, ULONG size,
665 SUBHEAP **ppSubHeap )
666 {
667 SUBHEAP *subheap;
668 ARENA_FREE *pArena;
669 FREE_LIST_ENTRY *pEntry = heap->freeList;
670
671 /* Find a suitable free list, and in it find a block large enough */
672
673 while (pEntry->size < size)
674 pEntry++;
675 pArena = pEntry->arena.next;
676 while (pArena != &heap->freeList[0].arena)
677 {
678 ULONG arena_size = (pArena->size & ARENA_SIZE_MASK) +
679 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
680 if (arena_size >= size)
681 {
682 subheap = HEAP_FindSubHeap( heap, pArena );
683 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
684 + size + HEAP_MIN_BLOCK_SIZE,
685 heap->flags))
686 return NULL;
687 *ppSubHeap = subheap;
688 return pArena;
689 }
690
691 pArena = pArena->next;
692 }
693
694 /* If no block was found, attempt to grow the heap */
695
696 if (!(heap->flags & HEAP_GROWABLE))
697 {
698 DPRINT("Not enough space in heap %p for %08lx bytes\n",
699 heap, size );
700 return NULL;
701 }
702 /* Make sure the committed size is large enough to fit one more free
703 * arena at the end: one sub-heap header, one first free arena that will
704 * eventually become in-use, and HEAP_MIN_BLOCK_SIZE for a second free
705 * arena that may receive all remaining free space in HEAP_ShrinkBlock(). */
707 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
708 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
709 max( HEAP_DEF_SIZE, size ) )))
710 return NULL;
711
712 DPRINT("created new sub-heap %p of %08lx bytes for heap %p\n",
713 subheap, size, heap );
714
715 *ppSubHeap = subheap;
716 return (ARENA_FREE *)(subheap + 1);
717 }
718
719
720 /***********************************************************************
721 * HEAP_IsValidArenaPtr
722 *
723 * Check that the pointer is inside the range possible for arenas.
724 */
725 static BOOLEAN HEAP_IsValidArenaPtr( HEAP *heap, PVOID ptr )
726 {
727 int i;
728 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
729 if (!subheap)
730 return FALSE;
731 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
732 return TRUE;
733 if (subheap != &heap->subheap)
734 return FALSE;
735 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
736 if (ptr == (PVOID)&heap->freeList[i].arena)
737 return TRUE;
738 return FALSE;
739 }
740
741
742 /***********************************************************************
743 * HEAP_ValidateFreeArena
744 */
745 static BOOLEAN HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
746 {
747 char *heapEnd = (char *)subheap + subheap->size;
748
749 /* Check magic number */
750 if (pArena->magic != ARENA_FREE_MAGIC)
751 {
752 DPRINT("Heap %p: invalid free arena magic for %p\n",
753 subheap->heap, pArena);
754 return FALSE;
755 }
756 /* Check size flags */
757 if (!(pArena->size & ARENA_FLAG_FREE) ||
758 (pArena->size & ARENA_FLAG_PREV_FREE))
759 {
760 DPRINT("Heap %p: bad flags %lx for free arena %p\n",
761 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
762 }
763 /* Check arena size */
764 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
765 {
766 DPRINT("Heap %p: bad size %08lx for free arena %p\n",
767 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena );
768 return FALSE;
769 }
770 /* Check that next pointer is valid */
771 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
772 {
773 DPRINT("Heap %p: bad next ptr %p for arena %p\n",
774 subheap->heap, pArena->next, pArena);
775 return FALSE;
776 }
777 /* Check that next arena is free */
778 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
779 (pArena->next->magic != ARENA_FREE_MAGIC))
780 {
781 DPRINT("Heap %p: next arena %p invalid for %p\n",
782 subheap->heap, pArena->next, pArena);
783 return FALSE;
784 }
785 /* Check that prev pointer is valid */
786 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
787 {
788 DPRINT("Heap %p: bad prev ptr %p for arena %p\n",
789 subheap->heap, pArena->prev, pArena);
790 return FALSE;
791 }
792 /* Check that prev arena is free */
793 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
794 (pArena->prev->magic != ARENA_FREE_MAGIC))
795 {
796 DPRINT("Heap %p: prev arena %p invalid for %p\n",
797 subheap->heap, pArena->prev, pArena);
798 return FALSE;
799 }
800 /* Check that next block has PREV_FREE flag */
801 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
802 {
803 if (!(*(PULONG)((char *)(pArena + 1) +
804 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
805 {
806 DPRINT("Heap %p: free arena %p next block has no PREV_FREE flag\n",
807 subheap->heap, pArena);
808 return FALSE;
809 }
810 /* Check next block back pointer */
811 if (*((ARENA_FREE **)((char *)(pArena + 1) +
812 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
813 {
814 DPRINT("Heap %p: arena %p has wrong back ptr %p\n",
815 subheap->heap, pArena,
816 *((PULONG)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
817 return FALSE;
818 }
819 }
820 return TRUE;
821 }
822
823
824 /***********************************************************************
825 * HEAP_ValidateInUseArena
826 */
827 static BOOLEAN HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOLEAN quiet )
828 {
829 char *heapEnd = (char *)subheap + subheap->size;
830
831 /* Check magic number */
832 if (pArena->magic != ARENA_INUSE_MAGIC)
833 {
834 if (quiet == NOISY)
835 {
836 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
837 subheap->heap, pArena);
838 if (TRACE_ON(heap))
839 HEAP_Dump( subheap->heap );
840 }
841 else if (WARN_ON(heap))
842 {
843 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
844 subheap->heap, pArena);
845 if (TRACE_ON(heap))
846 HEAP_Dump( subheap->heap );
847 }
848 return FALSE;
849 }
850 /* Check size flags */
851 if (pArena->size & ARENA_FLAG_FREE)
852 {
853 DPRINT("Heap %p: bad flags %lx for in-use arena %p\n",
854 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
855 return FALSE;
856 }
857 /* Check arena size */
858 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
859 {
860 DPRINT("Heap %p: bad size %08lx for in-use arena %p\n",
861 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena);
862 return FALSE;
863 }
864 /* Check next arena PREV_FREE flag */
865 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
866 (*(PULONG)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
867 {
868 DPRINT("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
869 subheap->heap, pArena);
870 return FALSE;
871 }
872 /* Check prev free arena */
873 if (pArena->size & ARENA_FLAG_PREV_FREE)
874 {
875 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
876 /* Check prev pointer */
877 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
878 {
879 DPRINT("Heap %p: bad back ptr %p for arena %p\n",
880 subheap->heap, pPrev, pArena );
881 return FALSE;
882 }
883 /* Check that prev arena is free */
884 if (!(pPrev->size & ARENA_FLAG_FREE) ||
885 (pPrev->magic != ARENA_FREE_MAGIC))
886 {
887 DPRINT("Heap %p: prev arena %p invalid for in-use %p\n",
888 subheap->heap, pPrev, pArena);
889 return FALSE;
890 }
891 /* Check that prev arena is really the previous block */
892 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
893 {
894 DPRINT("Heap %p: prev arena %p is not prev for in-use %p\n",
895 subheap->heap, pPrev, pArena );
896 return FALSE;
897 }
898 }
899 return TRUE;
900 }
901
902
903 /***********************************************************************
904 * HEAP_IsInsideHeap
905 * Checks whether the pointer points to a block inside a given heap.
906 *
907 * NOTES
908 * Should this return BOOL32?
909 *
910 * RETURNS
911 * !0: Success
912 * 0: Failure
913 */
914 int HEAP_IsInsideHeap(
915 HANDLE heap, /* [in] Heap */
916 ULONG flags, /* [in] Flags */
917 PVOID ptr /* [in] Pointer */
918 )
919 {
920 HEAP *heapPtr = HEAP_GetPtr( heap );
921 SUBHEAP *subheap;
922 int ret;
923
924 /* Validate the parameters */
925
926 if (!heapPtr)
927 return 0;
928 flags |= heapPtr->flags;
929 if (!(flags & HEAP_NO_SERIALIZE))
930 RtlEnterCriticalSection( &heapPtr->critSection );
931 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
932 (((char *)ptr >= (char *)subheap + subheap->headerSize
933 + sizeof(ARENA_INUSE))));
934 if (!(flags & HEAP_NO_SERIALIZE))
935 RtlLeaveCriticalSection( &heapPtr->critSection );
936 return ret;
937 }
938
939
940 void DumpStackFrames ( PULONG Frame, ULONG FrameCount )
941 {
942 ULONG i=0;
943
944 DbgPrint("Frames: ");
945 if ( !Frame )
946 {
947 #if defined __GNUC__
948 __asm__("mov %%ebp, %%ebx" : "=b" (Frame) : );
949 #elif defined(_MSC_VER)
950 __asm mov [Frame], ebp
951 #endif
952 Frame = (PULONG)Frame[0]; // step out of DumpStackFrames
953 }
954 while ( Frame != 0 && (ULONG)Frame != 0xDEADBEEF && (ULONG)Frame != 0xcdcdcdcd && (ULONG)Frame != 0xcccccccc && i++ < FrameCount )
955 {
956 DbgPrint("<%X>", (PVOID)Frame[1]);
957 if (Frame[1] == 0xdeadbeef)
958 break;
959 Frame = (PULONG)Frame[0];
960 DbgPrint(" ");
961 }
962 DbgPrint("\n");
963 }
964
965 /***********************************************************************
966 * HEAP_IsRealArena [Internal]
967 * Validates a block is a valid arena.
968 *
969 * RETURNS
970 * TRUE: Success
971 * FALSE: Failure
972 */
973 static BOOLEAN HEAP_IsRealArena(
974 HANDLE heap, /* [in] Handle to the heap */
975 ULONG flags, /* [in] Bit flags that control access during operation */
976 PVOID block, /* [in] Optional pointer to memory block to validate */
977 BOOLEAN quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
978 * does not complain */
979 )
980 {
981 SUBHEAP *subheap;
982 HEAP *heapPtr = (HEAP *)(heap);
983 BOOLEAN ret = TRUE;
984
985 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
986 {
987 DPRINT("Invalid heap %08x!\n", heap );
988 return FALSE;
989 }
990
991 flags &= HEAP_NO_SERIALIZE;
992 flags |= heapPtr->flags;
993 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
994 if (!(flags & HEAP_NO_SERIALIZE))
995 RtlEnterCriticalSection( &heapPtr->critSection );
996
997 if (block)
998 {
999 /* Only check this single memory block */
1000
1001 /* The following code is really HEAP_IsInsideHeap *
1002 * with serialization already done. */
1003 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
1004 ((char *)block < (char *)subheap + subheap->headerSize
1005 + sizeof(ARENA_INUSE)))
1006 {
1007 if (quiet == NOISY)
1008 {
1009 DPRINT("Heap %p: block %p is not inside heap\n",
1010 heap, block );
1011 DumpStackFrames(NULL,10);
1012 }
1013 else if (WARN_ON(heap))
1014 {
1015 DPRINT1("Heap %p: block %p is not inside heap\n",
1016 heap, block );
1017 DumpStackFrames(NULL,10);
1018 }
1019 ret = FALSE;
1020 }
1021 else
1022 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1023
1024 if (!(flags & HEAP_NO_SERIALIZE))
1025 RtlLeaveCriticalSection( &heapPtr->critSection );
1026 return ret;
1027 }
1028
1029 subheap = &heapPtr->subheap;
1030 while (subheap && ret)
1031 {
1032 char *ptr = (char *)subheap + subheap->headerSize;
1033 while (ptr < (char *)subheap + subheap->size)
1034 {
1035 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1036 {
1037 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1038 {
1039 ret = FALSE;
1040 break;
1041 }
1042 ptr += sizeof(ARENA_FREE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1043 }
1044 else
1045 {
1046 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1047 {
1048 ret = FALSE;
1049 break;
1050 }
1051 ptr += sizeof(ARENA_INUSE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1052 }
1053 }
1054 subheap = subheap->next;
1055 }
1056
1057 if (!(flags & HEAP_NO_SERIALIZE))
1058 RtlLeaveCriticalSection( &heapPtr->critSection );
1059 return ret;
1060 }
1061
1062
1063 /***********************************************************************
1064 * HeapCreate (KERNEL32.336)
1065 * RETURNS
1066 * Handle of heap: Success
1067 * NULL: Failure
1068 *
1069 * @implemented
1070 */
1071 HANDLE STDCALL
1072 RtlCreateHeap(ULONG flags,
1073 PVOID BaseAddress,
1074 ULONG maxSize,
1075 ULONG initialSize,
1076 PVOID Unknown,
1077 PRTL_HEAP_DEFINITION Definition)
1078 {
1079 SUBHEAP *subheap;
1080 HEAP *heapPtr;
1081
1082 /* Allocate the heap block */
1083
1084 if (!maxSize)
1085 {
1086 maxSize = HEAP_DEF_SIZE;
1087 flags |= HEAP_GROWABLE;
1088 }
1089 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize, maxSize )))
1090 {
1091 return 0;
1092 }
1093
1094 /* link it into the per-process heap list */
1095 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1096
1097 heapPtr = subheap->heap;
1098 heapPtr->next = (HEAP*)NtCurrentPeb()->ProcessHeaps;
1099 NtCurrentPeb()->ProcessHeaps = (HANDLE)heapPtr;
1100 NtCurrentPeb()->NumberOfHeaps++;
1101
1102 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1103
1104 return (HANDLE)subheap;
1105 }
1106
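/* Usage sketch, not part of the original file: creating a private heap with
 * the default maximum size. Passing maxSize == 0 selects HEAP_DEF_SIZE and
 * implies HEAP_GROWABLE, as shown above; the last two parameters are not
 * used by this implementation. The helper name is hypothetical. */
#if 0
static HANDLE ExampleCreatePrivateHeap(VOID)
{
    /* NULL base address lets the heap pick its own location;
     * 0x10000 bytes are committed up front. */
    return RtlCreateHeap( 0, NULL, 0, 0x10000, NULL, NULL );
}
#endif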
1107 /***********************************************************************
1108 * HeapDestroy (KERNEL32.337)
1113 * @implemented
1114 *
1115 * RETURNS
1116 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1117 * Failure: The Heap handle, if heap is the process heap.
1118 */
1119 HANDLE STDCALL
1120 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1121 {
1122 HEAP *heapPtr = HEAP_GetPtr( heap );
1123 SUBHEAP *subheap;
1124 ULONG flags;
1125 HEAP **pptr;
1126
1127 DPRINT("%08x\n", heap );
1128 if (!heapPtr)
1129 return heap;
1130
1131 if (heap == NtCurrentPeb()->ProcessHeap)
1132 return heap; /* cannot delete the main process heap */
1133
1134 /* remove it from the per-process list */
1135 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1136
1137 pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps;
1138 while (*pptr && *pptr != heapPtr) pptr = &(*pptr)->next;
1139 if (*pptr) *pptr = (*pptr)->next;
1140 NtCurrentPeb()->NumberOfHeaps--;
1141
1142 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1143
1144 RtlDeleteCriticalSection( &heapPtr->critSection );
1145 subheap = &heapPtr->subheap;
1146 // We must save the flags: the first subheap shares its memory with
1147 // the heap structure, so releasing the first subheap also releases
1148 // the heap structure.
1149 flags = heapPtr->flags;
1150 while (subheap)
1151 {
1152 SUBHEAP *next = subheap->next;
1153
1154 if (!(flags & HEAP_NO_VALLOC))
1155 {
1156 ULONG dummySize = 0;
1157 ZwFreeVirtualMemory(NtCurrentProcess(),
1158 (PVOID*)&subheap,
1159 &dummySize,
1160 MEM_RELEASE);
1161 }
1162 subheap = next;
1163 }
1164 return (HANDLE)NULL;
1165 }
1166
1167
1168 /***********************************************************************
1169 * HeapAlloc (KERNEL32.334)
1170 * RETURNS
1171 * Pointer to allocated memory block
1172 * NULL: Failure
1173 * 0x7d030f60--invalid flags in RtlHeapAllocate
1174 * @implemented
1175 */
1176 PVOID STDCALL
1177 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1178 ULONG flags, /* [in] Heap allocation control flags */
1179 ULONG size) /* [in] Number of bytes to allocate */
1180 {
1181 ARENA_FREE *pArena;
1182 ARENA_INUSE *pInUse;
1183 SUBHEAP *subheap;
1184 HEAP *heapPtr = HEAP_GetPtr( heap );
1185
1186 /* Validate the parameters */
1187
1188 if (!heapPtr)
1189 {
1190 if (flags & HEAP_GENERATE_EXCEPTIONS)
1191 RtlRaiseStatus( STATUS_NO_MEMORY );
1192 return NULL;
1193 }
1194 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1195 flags |= heapPtr->flags;
1196 if (!(flags & HEAP_NO_SERIALIZE))
1197 RtlEnterCriticalSection( &heapPtr->critSection );
1198 size = (size + 7) & ~7;
1199 if (size < HEAP_MIN_BLOCK_SIZE)
1200 size = HEAP_MIN_BLOCK_SIZE;
1201
1202 /* Locate a suitable free block */
1203
1204 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1205 {
1206 DPRINT("(%08x,%08lx,%08lx): returning NULL\n",
1207 heap, flags, size );
1208 if (!(flags & HEAP_NO_SERIALIZE))
1209 RtlLeaveCriticalSection( &heapPtr->critSection );
1210 if (flags & HEAP_GENERATE_EXCEPTIONS)
1211 RtlRaiseStatus( STATUS_NO_MEMORY );
1212 return NULL;
1213 }
1214
1215 /* Remove the arena from the free list */
1216
1217 pArena->next->prev = pArena->prev;
1218 pArena->prev->next = pArena->next;
1219
1220 /* Build the in-use arena */
1221
1222 pInUse = (ARENA_INUSE *)pArena;
1223 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1224 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1225 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1226 pInUse->magic = ARENA_INUSE_MAGIC;
1227
1228 /* Shrink the block */
1229
1230 HEAP_ShrinkBlock( subheap, pInUse, size );
1231
1232 if (flags & HEAP_ZERO_MEMORY)
1233 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1234 else if (TRACE_ON(heap))
1235 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1236
1237 if (!(flags & HEAP_NO_SERIALIZE))
1238 RtlLeaveCriticalSection( &heapPtr->critSection );
1239
1240 DPRINT("(%08x,%08lx,%08lx): returning %p\n",
1241 heap, flags, size, (PVOID)(pInUse + 1) );
1242 return (PVOID)(pInUse + 1);
1243 }
1244
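/* Usage sketch, not part of the original file: allocating a zero-initialized
 * block from a heap created as above. HEAP_ZERO_MEMORY triggers the memset in
 * RtlAllocateHeap; requests are rounded up to a multiple of 8 and to at least
 * HEAP_MIN_BLOCK_SIZE. The names are hypothetical. */
#if 0
static PVOID ExampleAllocateZeroed(HANDLE ExampleHeap, ULONG Bytes)
{
    return RtlAllocateHeap( ExampleHeap, HEAP_ZERO_MEMORY, Bytes );
}
#endif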
1245
1246 /***********************************************************************
1247 * HeapFree (KERNEL32.338)
1248 * RETURNS
1249 * TRUE: Success
1250 * FALSE: Failure
1251 *
1252 * @implemented
1253 */
1254 BOOLEAN STDCALL RtlFreeHeap(
1255 HANDLE heap, /* [in] Handle of heap */
1256 ULONG flags, /* [in] Heap freeing flags */
1257 PVOID ptr /* [in] Address of memory to free */
1258 )
1259 {
1260 ARENA_INUSE *pInUse;
1261 SUBHEAP *subheap;
1262 HEAP *heapPtr = HEAP_GetPtr( heap );
1263
1264 /* Validate the parameters */
1265
1266 if (!heapPtr)
1267 return FALSE;
1268 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1269 {
1270 DPRINT("(%08x,%08lx,%p): asked to free NULL\n",
1271 heap, flags, ptr );
1272 return TRUE;
1273 }
1274
1275 flags &= HEAP_NO_SERIALIZE;
1276 flags |= heapPtr->flags;
1277 if (!(flags & HEAP_NO_SERIALIZE))
1278 RtlEnterCriticalSection( &heapPtr->critSection );
1279 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1280 {
1281 if (!(flags & HEAP_NO_SERIALIZE))
1282 RtlLeaveCriticalSection( &heapPtr->critSection );
1283 DPRINT("(%08x,%08lx,%p): returning FALSE\n",
1284 heap, flags, ptr );
1285 return FALSE;
1286 }
1287
1288 /* Turn the block into a free block */
1289
1290 pInUse = (ARENA_INUSE *)ptr - 1;
1291 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1292 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1293
1294 if (!(flags & HEAP_NO_SERIALIZE))
1295 RtlLeaveCriticalSection( &heapPtr->critSection );
1296
1297 DPRINT("(%08x,%08lx,%p): returning TRUE\n",
1298 heap, flags, ptr );
1299 return TRUE;
1300 }
1301
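/* Usage sketch, not part of the original file: a simple allocate/free round
 * trip. Freeing a NULL pointer succeeds, matching the check above; freeing a
 * pointer that is not a valid in-use arena returns FALSE. The helper name is
 * hypothetical. */
#if 0
static BOOLEAN ExampleAllocateAndFree(HANDLE ExampleHeap)
{
    PVOID block = RtlAllocateHeap( ExampleHeap, 0, 128 );
    if (!block)
        return FALSE;
    return RtlFreeHeap( ExampleHeap, 0, block );
}
#endif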
1302
1303 /***********************************************************************
1304 * RtlReAllocateHeap
1305 * PARAMS
1306 * Heap [in] Handle of heap block
1307 * Flags [in] Heap reallocation flags
1308 * Ptr, [in] Address of memory to reallocate
1309 * Size [in] Number of bytes to reallocate
1310 *
1311 * RETURNS
1312 * Pointer to reallocated memory block
1313 * NULL: Failure
1314 * 0x7d030f60--invalid flags in RtlHeapAllocate
1315 * @implemented
1316 */
1317 PVOID STDCALL RtlReAllocateHeap(
1318 HANDLE Heap,
1319 ULONG Flags,
1320 PVOID Ptr,
1321 ULONG Size
1322 )
1323 {
1324 ARENA_INUSE *pArena;
1325 ULONG oldSize;
1326 HEAP *heapPtr;
1327 SUBHEAP *subheap;
1328
1329 if (!Ptr)
1330 return NULL;
1331 if (!(heapPtr = HEAP_GetPtr( Heap )))
1332 return NULL;
1333
1334 /* Validate the parameters */
1335
1336 Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1337 HEAP_REALLOC_IN_PLACE_ONLY;
1338 Flags |= heapPtr->flags;
1339 Size = (Size + 7) & ~7;
1340 if (Size < HEAP_MIN_BLOCK_SIZE)
1341 Size = HEAP_MIN_BLOCK_SIZE;
1342
1343 if (!(Flags & HEAP_NO_SERIALIZE))
1344 RtlEnterCriticalSection( &heapPtr->critSection );
1345 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1346 {
1347 if (!(Flags & HEAP_NO_SERIALIZE))
1348 RtlLeaveCriticalSection( &heapPtr->critSection );
1349 DPRINT("(%08x,%08lx,%p,%08lx): returning NULL\n",
1350 Heap, Flags, Ptr, Size );
1351 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1352 RtlRaiseStatus( STATUS_NO_MEMORY );
1353 return NULL;
1354 }
1355
1356 /* Check if we need to grow the block */
1357
1358 pArena = (ARENA_INUSE *)Ptr - 1;
1359 pArena->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1360
1361 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1362 oldSize = (pArena->size & ARENA_SIZE_MASK);
1363 if (Size > oldSize)
1364 {
1365 char *pNext = (char *)(pArena + 1) + oldSize;
1366 if ((pNext < (char *)subheap + subheap->size) &&
1367 (*(PULONG)pNext & ARENA_FLAG_FREE) &&
1368 (oldSize + (*(PULONG)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= Size))
1369 {
1370 /* The next block is free and large enough */
1371 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1372 pFree->next->prev = pFree->prev;
1373 pFree->prev->next = pFree->next;
1374 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1375 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1376 + Size + HEAP_MIN_BLOCK_SIZE,
1377 heapPtr->flags))
1378 {
1379 if (!(Flags & HEAP_NO_SERIALIZE))
1380 RtlLeaveCriticalSection( &heapPtr->critSection );
1381 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1382 RtlRaiseStatus( STATUS_NO_MEMORY );
1383 return NULL;
1384 }
1385 HEAP_ShrinkBlock( subheap, pArena, Size );
1386 }
1387 else /* Do it the hard way */
1388 {
1389 ARENA_FREE *pNew;
1390 ARENA_INUSE *pInUse;
1391 SUBHEAP *newsubheap;
1392
1393 if ((Flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1394 !(pNew = HEAP_FindFreeBlock( heapPtr, Size, &newsubheap )))
1395 {
1396 if (!(Flags & HEAP_NO_SERIALIZE))
1397 RtlLeaveCriticalSection( &heapPtr->critSection );
1398 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1399 RtlRaiseStatus( STATUS_NO_MEMORY );
1400 return NULL;
1401 }
1402
1403 /* Build the in-use arena */
1404
1405 pNew->next->prev = pNew->prev;
1406 pNew->prev->next = pNew->next;
1407 pInUse = (ARENA_INUSE *)pNew;
1408 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1409 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1410 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1411 pInUse->magic = ARENA_INUSE_MAGIC;
1412 HEAP_ShrinkBlock( newsubheap, pInUse, Size );
1413 memcpy( pInUse + 1, pArena + 1, oldSize );
1414
1415 /* Free the previous block */
1416
1417 HEAP_MakeInUseBlockFree( subheap, pArena, Flags );
1418 subheap = newsubheap;
1419 pArena = pInUse;
1420 }
1421 }
1422 else
1423 HEAP_ShrinkBlock( subheap, pArena, Size ); /* Shrink the block */
1424
1425 /* Clear the extra bytes if needed */
1426
1427 if (Size > oldSize)
1428 {
1429 if (Flags & HEAP_ZERO_MEMORY)
1430 memset( (char *)(pArena + 1) + oldSize, 0,
1431 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1432 else if (TRACE_ON(heap))
1433 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1434 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1435 }
1436
1437 /* Return the new arena */
1438
1439 if (!(Flags & HEAP_NO_SERIALIZE))
1440 RtlLeaveCriticalSection( &heapPtr->critSection );
1441
1442 DPRINT("(%08x,%08lx,%p,%08lx): returning %p\n",
1443 Heap, Flags, Ptr, Size, (PVOID)(pArena + 1) );
1444 return (PVOID)(pArena + 1);
1445 }
1446
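/* Usage sketch, not part of the original file: growing a block in place when
 * possible. HEAP_REALLOC_IN_PLACE_ONLY makes the call fail instead of moving
 * the data, and HEAP_ZERO_MEMORY zeroes the newly added bytes, as handled
 * above. The helper name is hypothetical. */
#if 0
static PVOID ExampleGrowInPlace(HANDLE ExampleHeap, PVOID Block, ULONG NewSize)
{
    return RtlReAllocateHeap( ExampleHeap,
                              HEAP_REALLOC_IN_PLACE_ONLY | HEAP_ZERO_MEMORY,
                              Block,
                              NewSize );
}
#endif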
1447
1448 /***********************************************************************
1449 * RtlCompactHeap
1450 *
1451 * @unimplemented
1452 */
1453 ULONG STDCALL
1454 RtlCompactHeap(HANDLE Heap,
1455 ULONG Flags)
1456 {
1457 UNIMPLEMENTED;
1458 return 0;
1459 }
1460
1461
1462 /***********************************************************************
1463 * RtlLockHeap
1464 * Attempts to acquire the critical section object for a specified heap.
1465 *
1466 * PARAMS
1467 * Heap [in] Handle of heap to lock for exclusive access
1468 *
1469 * RETURNS
1470 * TRUE: Success
1471 * FALSE: Failure
1472 *
1473 * @implemented
1474 */
1475 BOOLEAN STDCALL
1476 RtlLockHeap(IN HANDLE Heap)
1477 {
1478 HEAP *heapPtr = HEAP_GetPtr( Heap );
1479 if (!heapPtr)
1480 return FALSE;
1481 RtlEnterCriticalSection( &heapPtr->critSection );
1482 return TRUE;
1483 }
1484
1485
1486 /***********************************************************************
1487 * RtlUnlockHeap
1488 * Releases ownership of the critical section object.
1489 *
1490 * PARAMS
1491 * Heap [in] Handle to the heap to unlock
1492 *
1493 * RETURNS
1494 * TRUE: Success
1495 * FALSE: Failure
1496 *
1497 * @implemented
1498 */
1499 BOOLEAN STDCALL
1500 RtlUnlockHeap(HANDLE Heap)
1501 {
1502 HEAP *heapPtr = HEAP_GetPtr( Heap );
1503 if (!heapPtr)
1504 return FALSE;
1505 RtlLeaveCriticalSection( &heapPtr->critSection );
1506 return TRUE;
1507 }
1508
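/* Usage sketch, not part of the original file: holding the heap lock across
 * several operations and passing HEAP_NO_SERIALIZE so the individual calls do
 * not re-enter the critical section. The helper name is hypothetical. */
#if 0
static VOID ExampleLockedOperations(HANDLE ExampleHeap)
{
    if (RtlLockHeap( ExampleHeap ))
    {
        PVOID a = RtlAllocateHeap( ExampleHeap, HEAP_NO_SERIALIZE, 32 );
        PVOID b = RtlAllocateHeap( ExampleHeap, HEAP_NO_SERIALIZE, 64 );
        if (a)
            RtlFreeHeap( ExampleHeap, HEAP_NO_SERIALIZE, a );
        if (b)
            RtlFreeHeap( ExampleHeap, HEAP_NO_SERIALIZE, b );
        RtlUnlockHeap( ExampleHeap );
    }
}
#endif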
1509
1510 /***********************************************************************
1511 * RtlSizeHeap
1512 * PARAMS
1513 * Heap [in] Handle of heap
1514 * Flags [in] Heap size control flags
1515 * Ptr [in] Address of memory to return size for
1516 *
1517 * RETURNS
1518 * Size in bytes of allocated memory
1519 * 0xffffffff: Failure
1520 *
1521 * @implemented
1522 */
1523 ULONG STDCALL
1524 RtlSizeHeap(
1525 HANDLE Heap,
1526 ULONG Flags,
1527 PVOID Ptr
1528 )
1529 {
1530 ULONG ret;
1531 HEAP *heapPtr = HEAP_GetPtr( Heap );
1532
1533 if (!heapPtr)
1534 return 0;
1535 Flags &= HEAP_NO_SERIALIZE;
1536 Flags |= heapPtr->flags;
1537 if (!(Flags & HEAP_NO_SERIALIZE))
1538 RtlEnterCriticalSection( &heapPtr->critSection );
1539 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1540 {
1541 ret = 0xffffffff;
1542 }
1543 else
1544 {
1545 ARENA_INUSE *pArena = (ARENA_INUSE *)Ptr - 1;
1546 ret = pArena->size & ARENA_SIZE_MASK;
1547 }
1548 if (!(Flags & HEAP_NO_SERIALIZE))
1549 RtlLeaveCriticalSection( &heapPtr->critSection );
1550
1551 DPRINT("(%08x,%08lx,%p): returning %08lx\n",
1552 Heap, Flags, Ptr, ret );
1553 return ret;
1554 }
1555
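/* Usage sketch, not part of the original file: querying the usable size of an
 * allocation. The value returned is the rounded arena size, so it can be
 * larger than the size originally requested; 0xffffffff signals an invalid
 * block. The helper name is hypothetical. */
#if 0
static ULONG ExampleUsableSize(HANDLE ExampleHeap, PVOID Block)
{
    return RtlSizeHeap( ExampleHeap, 0, Block );
}
#endif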
1556
1557 /***********************************************************************
1558 * RtlValidateHeap
1559 * Validates a specified heap.
1560 *
1561 * PARAMS
1562 * Heap [in] Handle to the heap
1563 * Flags [in] Bit flags that control access during operation
1564 * Block [in] Optional pointer to memory block to validate
1565 *
1566 * NOTES
1567 * Flags is ignored.
1568 *
1569 * RETURNS
1570 * TRUE: Success
1571 * FALSE: Failure
1572 *
1573 * @implemented
1574 */
1575 BOOLEAN STDCALL RtlValidateHeap(
1576 HANDLE Heap,
1577 ULONG Flags,
1578 PVOID Block
1579 )
1580 {
1581 HEAP *heapPtr = HEAP_GetPtr( Heap );
1582 if (!heapPtr)
1583 return FALSE;
1584 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1585 }
1586
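/* Usage sketch, not part of the original file: validating either a whole heap
 * (Block == NULL walks every sub-heap) or a single allocation. The helper
 * name is hypothetical. */
#if 0
static BOOLEAN ExampleCheckHeap(HANDLE ExampleHeap, PVOID OptionalBlock)
{
    return RtlValidateHeap( ExampleHeap, 0, OptionalBlock );
}
#endif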
1587
1588 /***********************************************************************
1589 * HeapWalk (KERNEL32.344)
1590 * Enumerates the memory blocks in a specified heap.
1591 * See HEAP_Dump() for info on heap structure.
1592 *
1593 * TODO
1594 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1595 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1596 *
1597 * RETURNS
1598 * TRUE: Success
1599 * FALSE: Failure
1600 */
1601 #if 0
1602 BOOLEAN STDCALL HeapWalk(
1603 HANDLE heap, /* [in] Handle to heap to enumerate */
1604 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1605 )
1606 {
1607 HEAP *heapPtr = HEAP_GetPtr(heap);
1608 SUBHEAP *sub, *currentheap = NULL;
1609 BOOLEAN ret = FALSE;
1610 char *ptr;
1611 int region_index = 0;
1612
1613 if (!heapPtr || !entry)
1614 {
1615 return FALSE;
1616 }
1617
1618 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1619 RtlEnterCriticalSection( &heapPtr->critSection );
1620
1621 /* set ptr to the next arena to be examined */
1622
1623 if (!entry->lpData) /* first call (init) ? */
1624 {
1625 DPRINT("begin walking of heap 0x%08x.\n", heap);
1626 /*HEAP_Dump(heapPtr);*/
1627 currentheap = &heapPtr->subheap;
1628 ptr = (char*)currentheap + currentheap->headerSize;
1629 }
1630 else
1631 {
1632 ptr = entry->lpData;
1633 sub = &heapPtr->subheap;
1634 while (sub)
1635 {
1636 if (((char *)ptr >= (char *)sub) &&
1637 ((char *)ptr < (char *)sub + sub->size))
1638 {
1639 currentheap = sub;
1640 break;
1641 }
1642 sub = sub->next;
1643 region_index++;
1644 }
1645 if (currentheap == NULL)
1646 {
1647 DPRINT("no matching subheap found, shouldn't happen !\n");
1648 goto HW_end;
1649 }
1650
1651 ptr += entry->cbData; /* point to next arena */
1652 if (ptr > (char *)currentheap + currentheap->size - 1)
1653 { /* proceed with next subheap */
1654 if (!(currentheap = currentheap->next))
1655 { /* successfully finished */
1656 DPRINT("end reached.\n");
1657 goto HW_end;
1658 }
1659 ptr = (char*)currentheap + currentheap->headerSize;
1660 }
1661 }
1662
1663 entry->wFlags = 0;
1664 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1665 {
1666 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1667
1668 /*DPRINT("free, magic: %04x\n", pArena->magic);*/
1669
1670 entry->lpData = pArena + 1;
1671 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1672 entry->cbOverhead = sizeof(ARENA_FREE);
1673 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1674 }
1675 else
1676 {
1677 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1678
1679 /*DPRINT("busy, magic: %04x\n", pArena->magic);*/
1680
1681 entry->lpData = pArena + 1;
1682 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1683 entry->cbOverhead = sizeof(ARENA_INUSE);
1684 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1685 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1686 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1687 }
1688
1689 entry->iRegionIndex = region_index;
1690
1691 /* first element of heap ? */
1692 if (ptr == (char *)currentheap + currentheap->headerSize)
1693 {
1694 entry->wFlags |= PROCESS_HEAP_REGION;
1695 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1696 entry->Foo.Region.dwUnCommittedSize =
1697 currentheap->size - currentheap->commitSize;
1698 entry->Foo.Region.lpFirstBlock = /* first valid block */
1699 (char *)currentheap + currentheap->headerSize;
1700 entry->Foo.Region.lpLastBlock = /* first invalid block */
1701 (char *)currentheap + currentheap->size;
1702 }
1703 ret = TRUE;
1704
1705 HW_end:
1706 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1707 RtlLeaveCriticalSection( &heapPtr->critSection );
1708
1709 return ret;
1710 }
1711 #endif
1712
1713
1714 VOID
1715 RtlInitializeHeapManager(VOID)
1716 {
1717 PPEB Peb;
1718
1719 Peb = NtCurrentPeb();
1720
1721 Peb->NumberOfHeaps = 0;
1722 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1723 Peb->ProcessHeaps = NULL;
1724
1725 RtlInitializeCriticalSection(&RtlpProcessHeapsListLock);
1726 }
1727
1728
1729 /*
1730 * @implemented
1731 */
1732 NTSTATUS STDCALL
1733 RtlEnumProcessHeaps(NTSTATUS STDCALL_FUNC(*func)(PVOID, LONG),
1734 LONG lParam)
1735 {
1736 NTSTATUS Status = STATUS_SUCCESS;
1737 HEAP** pptr;
1738
1739 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1740
1741 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1742 {
1743 Status = func(*pptr,lParam);
1744 if (!NT_SUCCESS(Status))
1745 break;
1746 }
1747
1748 RtlLeaveCriticalSection(&RtlpProcessHeapsListLock);
1749
1750 return Status;
1751 }
1752
1753
1754 /*
1755 * @implemented
1756 */
1757 ULONG STDCALL
1758 RtlGetProcessHeaps(ULONG HeapCount,
1759 HANDLE *HeapArray)
1760 {
1761 ULONG Result = 0;
1762 HEAP ** pptr;
1763
1764 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1765
1766 Result = NtCurrentPeb()->NumberOfHeaps;
1767
1768 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1769 {
1770 int i = 0;
1771 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1772 {
1773 HeapArray[i++] = *pptr;
1774 }
1775 }
1776
1777 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1778
1779 return Result;
1780 }
1781
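/* Usage sketch, not part of the original file: fetching the handles of all
 * process heaps into a caller-supplied array. The function always returns the
 * total heap count and only fills the array when it is large enough, as
 * implemented above. The names are hypothetical. */
#if 0
static VOID ExampleListProcessHeaps(VOID)
{
    HANDLE Heaps[16];
    ULONG Count = RtlGetProcessHeaps( 16, Heaps );
    if (Count <= 16)
    {
        ULONG i;
        for (i = 0; i < Count; i++)
            DPRINT( "Process heap %lu: %p\n", i, Heaps[i] );
    }
}
#endif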
1782
1783 /*
1784 * @implemented
1785 */
1786 BOOLEAN STDCALL
1787 RtlValidateProcessHeaps(VOID)
1788 {
1789 BOOLEAN Result = TRUE;
1790 HEAP ** pptr;
1791
1792 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1793
1794 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1795 {
1796 if (!RtlValidateHeap(*pptr, 0, NULL))
1797 {
1798 Result = FALSE;
1799 break;
1800 }
1801 }
1802
1803 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1804
1805 return Result;
1806 }
1807
1808
1809 /*
1810 * @unimplemented
1811 */
1812 BOOLEAN STDCALL
1813 RtlZeroHeap(
1814 IN PVOID HeapHandle,
1815 IN ULONG Flags
1816 )
1817 {
1818 UNIMPLEMENTED;
1819 return FALSE;
1820 }
1821
1822 /* EOF */