1 /* COPYRIGHT: See COPYING in the top level directory
2 * PROJECT: ReactOS system libraries
3 * FILE: lib/rtl/heap.c
4 * PURPOSE: Heap handling functions
5 * PROGRAMMERS: Copyright 1996 Alexandre Julliard
6 * Copyright 1998 Ulrich Weigand
7 */
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 /* INCLUDES *****************************************************************/
16
17 #include <rtl.h>
18
19 #define NDEBUG
20 #include <debug.h>
21
22 /* FUNCTIONS *****************************************************************/
23
24 #define WARN_ON(x) (1)
25
26 #ifdef NDEBUG
27 #define TRACE_ON(x) (0)
28 #else
29 #define TRACE_ON(x) (1)
30 #endif
31
32 static RTL_CRITICAL_SECTION RtlpProcessHeapsListLock;
33
34
35 typedef struct tagARENA_INUSE
36 {
37 ULONG size; /* Block size; must be the first field */
38 USHORT threadId; /* Allocating thread id */
39 USHORT magic; /* Magic number */
40 }
41 ARENA_INUSE;
42
43 typedef struct tagARENA_FREE
44 {
45 ULONG size; /* Block size; must be the first field */
46 USHORT threadId; /* Freeing thread id */
47 USHORT magic; /* Magic number */
48 struct tagARENA_FREE *next; /* Next free arena */
49 struct tagARENA_FREE *prev; /* Prev free arena */
50 }
51 ARENA_FREE;
52
53 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
54 #define ARENA_FLAG_PREV_FREE 0x00000002
55 #define ARENA_SIZE_MASK 0xfffffffc
56 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
57 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
58
59 #define ARENA_INUSE_FILLER 0x55
60 #define ARENA_FREE_FILLER 0xaa
61
62 #define QUIET 1 /* Suppress messages */
63 #define NOISY 0 /* Report all errors */
64
65 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
66
67 /* Max size of the blocks on the free lists */
68 static const ULONG HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
69 {
70 0x20, 0x80, 0x200, 0xffffffff
71 };
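
/* Example (disabled): how a block size is mapped to one of the four free
 * lists above.  This is only a sketch of the linear scan that
 * HEAP_InsertFreeBlock() and HEAP_FindFreeBlock() perform further down;
 * the helper name HEAP_GetFreeListIndex is hypothetical.
 */
#if 0
static int HEAP_GetFreeListIndex( ULONG size )
{
    int i = 0;
    /* The last threshold is 0xffffffff, so the scan always terminates. */
    while (HEAP_freeListSizes[i] < size)
        i++;
    return i;   /* e.g. 0x18 -> list 0, 0x100 -> list 2, 0x1000 -> list 3 */
}
#endif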
72
73 typedef struct
74 {
75 ULONG size;
76 ARENA_FREE arena;
77 }
78 FREE_LIST_ENTRY;
79
80 struct tagHEAP;
81
82 typedef struct tagSUBHEAP
83 {
84 ULONG size; /* Size of the whole sub-heap */
85 ULONG commitSize; /* Committed size of the sub-heap */
86 ULONG headerSize; /* Size of the heap header */
87 struct tagSUBHEAP *next; /* Next sub-heap */
88 struct tagHEAP *heap; /* Main heap structure */
89 ULONG magic; /* Magic number */
90 }
91 SUBHEAP, *PSUBHEAP;
92
93 #define SUBHEAP_MAGIC ((ULONG)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
94
95 typedef struct tagHEAP
96 {
97 SUBHEAP subheap; /* First sub-heap */
98 struct tagHEAP *next; /* Next heap for this process */
99 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
100 RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
101 ULONG flags; /* Heap flags */
102 ULONG magic; /* Magic number */
103 PRTL_HEAP_COMMIT_ROUTINE commitRoutine;
104 }
105 HEAP, *PHEAP;
106
107 #define HEAP_MAGIC ((ULONG)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
108
109 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
110 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
111 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
112
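/* Example (disabled): how an arena 'size' field packs the flags above into
 * its two low bits.  A minimal worked sketch with an invented block size.
 */
#if 0
static VOID HEAP_SizeFieldExample( VOID )
{
    ULONG stored = 0x230 | ARENA_FLAG_FREE;          /* free block of 0x230 bytes    */

    ASSERT( (stored & ARENA_SIZE_MASK) == 0x230 );   /* size is recovered by masking */
    ASSERT( stored & ARENA_FLAG_FREE );              /* flag bit is still visible    */
    ASSERT( !(stored & ARENA_FLAG_PREV_FREE) );      /* previous block is in use     */
}
#endif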
113
114 static BOOLEAN HEAP_IsRealArena( HANDLE heap, ULONG flags, PVOID block, BOOLEAN quiet );
115
116
117 /***********************************************************************
118 * HEAP_Dump
119 */
120 VOID
121 HEAP_Dump(PHEAP heap)
122 {
123 int i;
124 SUBHEAP *subheap;
125 char *ptr;
126
127 DPRINT( "Heap: %p\n", heap );
128 DPRINT( "Next: %p Sub-heaps: %p",
129 heap->next, &heap->subheap );
130 subheap = &heap->subheap;
131 while (subheap->next)
132 {
133 DPRINT( " -> %p", subheap->next );
134 subheap = subheap->next;
135 }
136
137 DPRINT( "\nFree lists:\n Block Stat Size Id\n" );
138 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
139 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
140 &heap->freeList[i].arena,
141 heap->freeList[i].arena.size,
142 heap->freeList[i].arena.threadId,
143 heap->freeList[i].arena.prev,
144 heap->freeList[i].arena.next );
145
146 subheap = &heap->subheap;
147 while (subheap)
148 {
149 ULONG freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
150 DPRINT( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
151 subheap, subheap->size, subheap->commitSize );
152
153 DPRINT( "\n Block Stat Size Id\n" );
154 ptr = (char*)subheap + subheap->headerSize;
155 while (ptr < (char *)subheap + subheap->size)
156 {
157 if (*(PULONG)ptr & ARENA_FLAG_FREE)
158 {
159 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
160 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
161 pArena, pArena->size & ARENA_SIZE_MASK,
162 pArena->threadId, pArena->prev,
163 pArena->next);
164 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
165 arenaSize += sizeof(ARENA_FREE);
166 freeSize += pArena->size & ARENA_SIZE_MASK;
167 }
168 else if (*(PULONG)ptr & ARENA_FLAG_PREV_FREE)
169 {
170 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
171 DPRINT( "%p Used %08lx %04x back=%08lx\n",
172 pArena, pArena->size & ARENA_SIZE_MASK,
173 pArena->threadId, *((PULONG)pArena - 1));
174 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
175 arenaSize += sizeof(ARENA_INUSE);
176 usedSize += pArena->size & ARENA_SIZE_MASK;
177 }
178 else
179 {
180 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
181 DPRINT( "%p used %08lx %04x\n",
182 pArena, pArena->size & ARENA_SIZE_MASK,
183 pArena->threadId);
184 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
185 arenaSize += sizeof(ARENA_INUSE);
186 usedSize += pArena->size & ARENA_SIZE_MASK;
187 }
188 }
189 DPRINT( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
190 subheap->size, subheap->commitSize, freeSize, usedSize,
191 arenaSize, (arenaSize * 100) / subheap->size );
192 subheap = subheap->next;
193 }
194 }
195
196
197 /***********************************************************************
198 * HEAP_GetPtr
199 * RETURNS
200 * Pointer to the heap
201 * NULL: Failure
202 */
203 static PHEAP
204 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
205 {
206 HEAP *heapPtr = (HEAP *)heap;
207 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
208 {
209 DPRINT("Invalid heap %08x!\n", heap );
210 return NULL;
211 }
212 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
213 {
214 HEAP_Dump( heapPtr );
215 ASSERT( FALSE );
216 return NULL;
217 }
218 return heapPtr;
219 }
220
221
222 /***********************************************************************
223 * HEAP_InsertFreeBlock
224 *
225 * Insert a free block into the free list.
226 */
227 static VOID
228 HEAP_InsertFreeBlock(PHEAP heap,
229 ARENA_FREE *pArena,
230 BOOLEAN last)
231 {
232 FREE_LIST_ENTRY *pEntry = heap->freeList;
233 while (pEntry->size < pArena->size)
234 pEntry++;
235 if (last)
236 {
237 /* insert at end of free list, i.e. before next free list entry */
238 pEntry++;
239 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
240 {
241 pEntry = heap->freeList;
242 }
243 pArena->prev = pEntry->arena.prev;
244 pArena->prev->next = pArena;
245 pArena->next = &pEntry->arena;
246 pEntry->arena.prev = pArena;
247 }
248 else
249 {
250 /* insert at head of free list */
251 pArena->next = pEntry->arena.next;
252 pArena->next->prev = pArena;
253 pArena->prev = &pEntry->arena;
254 pEntry->arena.next = pArena;
255 }
256 pArena->size |= ARENA_FLAG_FREE;
257 }
258
259
260 /***********************************************************************
261 * HEAP_FindSubHeap
262 * Find the sub-heap containing a given address.
263 *
264 * RETURNS
265 * Pointer: Success
266 * NULL: Failure
267 */
268 static PSUBHEAP
269 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
270 PVOID ptr) /* [in] Address */
271 {
272 PSUBHEAP sub = &heap->subheap;
273 while (sub)
274 {
275 if (((char *)ptr >= (char *)sub) &&
276 ((char *)ptr < (char *)sub + sub->size))
277 return sub;
278 sub = sub->next;
279 }
280 return NULL;
281 }
282
283
284 /***********************************************************************
285 * HEAP_Commit
286 *
287 * Make sure the heap storage is committed up to (not including) ptr.
288 */
289 static __inline BOOLEAN
290 HEAP_Commit(SUBHEAP *subheap,
291 PVOID ptr,
292 ULONG flags)
293 {
294 SIZE_T size = (SIZE_T)((char *)ptr - (char *)subheap);
295 NTSTATUS Status;
296 PVOID address;
297 SIZE_T commitsize;
298
299 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
300 if (size > subheap->size)
301 size = subheap->size;
302 if (size <= subheap->commitSize)
303 return TRUE;
304
305 address = (PVOID)((char *)subheap + subheap->commitSize);
306 commitsize = size - subheap->commitSize;
307
308 if (subheap->heap->commitRoutine != NULL)
309 {
310 Status = subheap->heap->commitRoutine(subheap->heap, &address, &commitsize);
311 }
312 else
313 {
314 Status = NtAllocateVirtualMemory(NtCurrentProcess(),
315 &address,
316 0,
317 &commitsize,
318 MEM_COMMIT,
319 PAGE_EXECUTE_READWRITE);
320 }
321 if (!NT_SUCCESS(Status))
322 {
323 DPRINT( "Could not commit %08lx bytes at %p for heap %p\n",
324 size - subheap->commitSize,
325 ((char *)subheap + subheap->commitSize),
326 subheap->heap );
327 return FALSE;
328 }
329
330 subheap->commitSize += commitsize;
331 return TRUE;
332 }
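
/* Example (disabled): a worked instance of the commit rounding above.  The
 * offsets are invented for illustration; only the arithmetic is the point.
 */
#if 0
static VOID HEAP_CommitExample( SUBHEAP *subheap )
{
    /* Assume 0x10000 bytes of this sub-heap are committed and a new block
     * needs storage up to offset 0x10020.  The request is rounded up to the
     * next 64K boundary, so HEAP_Commit() commits one more 64K chunk
     * (0x20000 - 0x10000 bytes) starting at (char *)subheap + 0x10000. */
    ASSERT( ((0x10020 + COMMIT_MASK) & ~COMMIT_MASK) == 0x20000 );

    if (!HEAP_Commit( subheap, (char *)subheap + 0x10020, subheap->heap->flags ))
        DPRINT( "commit failed\n" );
}
#endif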
333
334
335 #if 0
336 /***********************************************************************
337 * HEAP_Decommit
338 *
339 * If possible, decommit the heap storage from (including) 'ptr'.
340 */
341 static inline BOOLEAN HEAP_Decommit( SUBHEAP *subheap, PVOID ptr, ULONG flags )
342 {
343 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
344 PVOID address;
345 ULONG decommitsize;
346 NTSTATUS Status;
347 /* round to next block and add one full block */
348 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
349 if (size >= subheap->commitSize)
350 return TRUE;
351
352 address = (PVOID)((char *)subheap + size);
353 decommitsize = subheap->commitSize - size;
354
355 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
356 &address,
357 &decommitsize,
358 MEM_DECOMMIT);
359 if (!NT_SUCCESS(Status))
360 {
361 DPRINT( "Could not decommit %08lx bytes at %p for heap %p\n",
362 subheap->commitSize - size,
363 ((char *)subheap + size),
364 subheap->heap );
365 return FALSE;
366 }
367
368 subheap->commitSize -= decommitsize;
369 return TRUE;
370 }
371 #endif
372
373 /***********************************************************************
374 * HEAP_CreateFreeBlock
375 *
376 * Create a free block at a specified address. 'size' is the size of the
377 * whole block, including the new arena.
378 */
379 static VOID HEAP_CreateFreeBlock( SUBHEAP *subheap, PVOID ptr, ULONG size )
380 {
381 ARENA_FREE *pFree;
382 BOOLEAN last;
383
384 /* Create a free arena */
385
386 pFree = (ARENA_FREE *)ptr;
387 pFree->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
388 pFree->magic = ARENA_FREE_MAGIC;
389
390 /* If debugging, erase the freed block content */
391
392 if (TRACE_ON(heap))
393 {
394 char *pEnd = (char *)ptr + size;
395 if (pEnd > (char *)subheap + subheap->commitSize)
396 pEnd = (char *)subheap + subheap->commitSize;
397 if (pEnd > (char *)(pFree + 1))
398 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
399 }
400
401 /* Check if next block is free also */
402
403 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
404 (*(PULONG)((char *)ptr + size) & ARENA_FLAG_FREE))
405 {
406 /* Remove the next arena from the free list */
407 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
408 pNext->next->prev = pNext->prev;
409 pNext->prev->next = pNext->next;
410 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
411 if (TRACE_ON(heap))
412 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
413 }
414
415 /* Set the next block PREV_FREE flag and pointer */
416
417 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
418 if (!last)
419 {
420 PULONG pNext = (PULONG)((char *)ptr + size);
421 *pNext |= ARENA_FLAG_PREV_FREE;
422 *(ARENA_FREE **)(pNext - 1) = pFree;
423 }
424
425 /* Last, insert the new block into the free list */
426
427 pFree->size = size - sizeof(*pFree);
428 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
429 }
430
431
432 /***********************************************************************
433 * HEAP_MakeInUseBlockFree
434 *
435 * Turn an in-use block into a free block. Can also decommit the end of
436 * the heap, and possibly even free the sub-heap altogether.
437 */
438 static VOID HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
439 ULONG flags)
440 {
441 ARENA_FREE *pFree;
442 ULONG size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
443 ULONG dummySize = 0;
444
445 /* Check if we can merge with previous block */
446
447 if (pArena->size & ARENA_FLAG_PREV_FREE)
448 {
449 pFree = *((ARENA_FREE **)pArena - 1);
450 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
451 /* Remove it from the free list */
452 pFree->next->prev = pFree->prev;
453 pFree->prev->next = pFree->next;
454 }
455 else
456 pFree = (ARENA_FREE *)pArena;
457
458 /* Create a free block */
459
460 HEAP_CreateFreeBlock( subheap, pFree, size );
461 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
462 if ((char *)pFree + size < (char *)subheap + subheap->size)
463 return; /* Not the last block, so nothing more to do */
464
465 /* Free the whole sub-heap if it's empty and not the original one */
466
467 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
468 (subheap != &subheap->heap->subheap))
469 {
470 SUBHEAP *pPrev = &subheap->heap->subheap;
471 /* Remove the free block from the list */
472 pFree->next->prev = pFree->prev;
473 pFree->prev->next = pFree->next;
474 /* Remove the subheap from the list */
475 while (pPrev && (pPrev->next != subheap))
476 pPrev = pPrev->next;
477 if (pPrev)
478 pPrev->next = subheap->next;
479 /* Free the memory */
480 subheap->magic = 0;
481 ZwFreeVirtualMemory(NtCurrentProcess(),
482 (PVOID*)&subheap,
483 &dummySize,
484 MEM_RELEASE);
485 return;
486 }
487
488 /* Decommit the end of the heap */
489 }
490
491
492 /***********************************************************************
493 * HEAP_ShrinkBlock
494 *
495 * Shrink an in-use block.
496 */
497 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, ULONG size)
498 {
499 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
500 {
501 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
502 (pArena->size & ARENA_SIZE_MASK) - size );
503 /* assign size plus previous arena flags */
504 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
505 }
506 else
507 {
508 /* Turn off PREV_FREE flag in next block */
509 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
510 if (pNext < (char *)subheap + subheap->size)
511 *(PULONG)pNext &= ~ARENA_FLAG_PREV_FREE;
512 }
513 }
514
515 /***********************************************************************
516 * HEAP_InitSubHeap
517 */
518 static BOOLEAN HEAP_InitSubHeap( HEAP *heap, PVOID address, ULONG flags,
519 SIZE_T commitSize, ULONG totalSize,
520 PRTL_HEAP_PARAMETERS Parameters )
521 {
522 SUBHEAP *subheap = (SUBHEAP *)address;
523 FREE_LIST_ENTRY *pEntry;
524 int i;
525 NTSTATUS Status;
526
527 /* Fill the sub-heap structure */
528
529 subheap = (PSUBHEAP)address;
530 subheap->heap = heap;
531 subheap->size = totalSize;
532 subheap->commitSize = commitSize;
533 subheap->magic = SUBHEAP_MAGIC;
534
535 if ( subheap != (SUBHEAP *)heap )
536 {
537 /* If this is a secondary subheap, insert it into list */
538
539 subheap->headerSize = sizeof(SUBHEAP);
540 subheap->next = heap->subheap.next;
541 heap->subheap.next = subheap;
542 }
543 else
544 {
545 /* If this is a primary subheap, initialize main heap */
546
547 subheap->headerSize = sizeof(HEAP);
548 subheap->next = NULL;
549 heap->next = NULL;
550 heap->flags = flags;
551 heap->magic = HEAP_MAGIC;
552 if (Parameters)
553 heap->commitRoutine = Parameters->CommitRoutine;
554 else
555 heap->commitRoutine = NULL;
556
557 /* Build the free lists */
558
559 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
560 {
561 pEntry->size = HEAP_freeListSizes[i];
562 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
563 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
564 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
565 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
566 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
567 pEntry->arena.threadId = 0;
568 pEntry->arena.magic = ARENA_FREE_MAGIC;
569 }
570
571 /* Initialize critical section */
572
573 RtlInitializeHeapLock( &heap->critSection );
574 }
575
576 /* Commit memory */
577 if (heap->commitRoutine)
578 {
579 Status = heap->commitRoutine(heap, &address, &commitSize);
580 }
581 else
582 {
583 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
584 &address,
585 0,
586 &commitSize,
587 MEM_COMMIT,
588 PAGE_EXECUTE_READWRITE);
589 }
590 if (!NT_SUCCESS(Status))
591 {
592 DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
593 commitSize, address);
594 return FALSE;
595 }
596
597 /* Create the first free block */
598
599 HEAP_CreateFreeBlock( subheap, (PUCHAR)subheap + subheap->headerSize,
600 subheap->size - subheap->headerSize );
601
602 return TRUE;
603 }
604
605 /***********************************************************************
606 * HEAP_CreateSubHeap
607 *
608 * Create a sub-heap of the given size.
609 * If heap == NULL, creates a main heap.
610 */
611 static PSUBHEAP
612 HEAP_CreateSubHeap(PVOID BaseAddress,
613 HEAP *heap, ULONG flags,
614 SIZE_T commitSize, ULONG totalSize,
615 PRTL_HEAP_PARAMETERS Parameters )
616 {
617 PVOID address;
618 NTSTATUS Status;
619
620 /* Round-up sizes on a 64K boundary */
621
622 totalSize = (totalSize + 0xffff) & 0xffff0000;
623 commitSize = (commitSize + 0xffff) & 0xffff0000;
624 if (!commitSize)
625 commitSize = 0x10000;
626 if (totalSize < commitSize)
627 totalSize = commitSize;
628
629 /* Allocate the memory block */
630 address = BaseAddress;
631 if (!address)
632 {
633 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
634 &address,
635 0,
636 (PULONG)&totalSize,
637 MEM_RESERVE | MEM_COMMIT,
638 PAGE_EXECUTE_READWRITE);
639 if (!NT_SUCCESS(Status))
640 {
641 DPRINT("Could not VirtualAlloc %08lx bytes\n",
642 totalSize );
643 return NULL;
644 }
645 }
646
647 /* Initialize subheap */
648
649 if (!HEAP_InitSubHeap( heap ? heap : (HEAP *)address,
650 address, flags, commitSize, totalSize,
651 Parameters ))
652 {
653 if (!BaseAddress)
654 {
655 ULONG dummySize = 0;
656 ZwFreeVirtualMemory(NtCurrentProcess(),
657 &address,
658 &dummySize,
659 MEM_RELEASE);
660 }
661 return NULL;
662 }
663
664 return (SUBHEAP *)address;
665 }
666
667
668 /***********************************************************************
669 * HEAP_FindFreeBlock
670 *
671 * Find a free block at least as large as the requested size, and make sure
672 * the requested size is committed.
673 */
674 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, SIZE_T size,
675 SUBHEAP **ppSubHeap )
676 {
677 SUBHEAP *subheap;
678 ARENA_FREE *pArena;
679 FREE_LIST_ENTRY *pEntry = heap->freeList;
680
681 /* Find a suitable free list, and in it find a block large enough */
682
683 while (pEntry->size < size)
684 pEntry++;
685 pArena = pEntry->arena.next;
686 while (pArena != &heap->freeList[0].arena)
687 {
688 ULONG arena_size = (pArena->size & ARENA_SIZE_MASK) +
689 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
690 if (arena_size >= size)
691 {
692 subheap = HEAP_FindSubHeap( heap, pArena );
693 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
694 + size + HEAP_MIN_BLOCK_SIZE,
695 heap->flags))
696 return NULL;
697 *ppSubHeap = subheap;
698 return pArena;
699 }
700
701 pArena = pArena->next;
702 }
703
704 /* If no block was found, attempt to grow the heap */
705
706 if (!(heap->flags & HEAP_GROWABLE))
707 {
708 DPRINT("Not enough space in heap %p for %08lx bytes\n",
709 heap, size );
710 return NULL;
711 }
712 /* Make sure that the new sub-heap has enough *committed* space to hold a
713 * trailing free arena as well: one sub-heap header, one first free arena
714 * (which will eventually become in-use), and HEAP_MIN_BLOCK_SIZE for the
715 * second free arena that might get assigned all remaining free space in
716 * HEAP_ShrinkBlock(). */
717 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
718 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
719 max( HEAP_DEF_SIZE, size ), NULL )))
720 return NULL;
721
722 DPRINT("created new sub-heap %p of %08lx bytes for heap %p\n",
723 subheap, size, heap );
724
725 *ppSubHeap = subheap;
726 return (ARENA_FREE *)(subheap + 1);
727 }
728
729
730 /***********************************************************************
731 * HEAP_IsValidArenaPtr
732 *
733 * Check that the pointer is inside the range possible for arenas.
734 */
735 static BOOLEAN HEAP_IsValidArenaPtr( HEAP *heap, PVOID ptr )
736 {
737 int i;
738 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
739 if (!subheap)
740 return FALSE;
741 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
742 return TRUE;
743 if (subheap != &heap->subheap)
744 return FALSE;
745 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
746 if (ptr == (PVOID)&heap->freeList[i].arena)
747 return TRUE;
748 return FALSE;
749 }
750
751
752 /***********************************************************************
753 * HEAP_ValidateFreeArena
754 */
755 static BOOLEAN HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
756 {
757 char *heapEnd = (char *)subheap + subheap->size;
758
759 /* Check magic number */
760 if (pArena->magic != ARENA_FREE_MAGIC)
761 {
762 DPRINT("Heap %p: invalid free arena magic for %p\n",
763 subheap->heap, pArena);
764 return FALSE;
765 }
766 /* Check size flags */
767 if (!(pArena->size & ARENA_FLAG_FREE) ||
768 (pArena->size & ARENA_FLAG_PREV_FREE))
769 {
770 DPRINT("Heap %p: bad flags %lx for free arena %p\n",
771 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
772 }
773 /* Check arena size */
774 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
775 {
776 DPRINT("Heap %p: bad size %08lx for free arena %p\n",
777 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena );
778 return FALSE;
779 }
780 /* Check that next pointer is valid */
781 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
782 {
783 DPRINT("Heap %p: bad next ptr %p for arena %p\n",
784 subheap->heap, pArena->next, pArena);
785 return FALSE;
786 }
787 /* Check that next arena is free */
788 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
789 (pArena->next->magic != ARENA_FREE_MAGIC))
790 {
791 DPRINT("Heap %p: next arena %p invalid for %p\n",
792 subheap->heap, pArena->next, pArena);
793 return FALSE;
794 }
795 /* Check that prev pointer is valid */
796 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
797 {
798 DPRINT("Heap %p: bad prev ptr %p for arena %p\n",
799 subheap->heap, pArena->prev, pArena);
800 return FALSE;
801 }
802 /* Check that prev arena is free */
803 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
804 (pArena->prev->magic != ARENA_FREE_MAGIC))
805 {
806 DPRINT("Heap %p: prev arena %p invalid for %p\n",
807 subheap->heap, pArena->prev, pArena);
808 return FALSE;
809 }
810 /* Check that next block has PREV_FREE flag */
811 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
812 {
813 if (!(*(PULONG)((char *)(pArena + 1) +
814 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
815 {
816 DPRINT("Heap %p: free arena %p next block has no PREV_FREE flag\n",
817 subheap->heap, pArena);
818 return FALSE;
819 }
820 /* Check next block back pointer */
821 if (*((ARENA_FREE **)((char *)(pArena + 1) +
822 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
823 {
824 DPRINT("Heap %p: arena %p has wrong back ptr %p\n",
825 subheap->heap, pArena,
826 *((PULONG)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
827 return FALSE;
828 }
829 }
830 return TRUE;
831 }
832
833
834 /***********************************************************************
835 * HEAP_ValidateInUseArena
836 */
837 static BOOLEAN HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOLEAN quiet )
838 {
839 char *heapEnd = (char *)subheap + subheap->size;
840
841 /* Check magic number */
842 if (pArena->magic != ARENA_INUSE_MAGIC)
843 {
844 if (quiet == NOISY)
845 {
846 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
847 subheap->heap, pArena);
848 if (TRACE_ON(heap))
849 HEAP_Dump( subheap->heap );
850 }
851 else if (WARN_ON(heap))
852 {
853 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
854 subheap->heap, pArena);
855 if (TRACE_ON(heap))
856 HEAP_Dump( subheap->heap );
857 }
858 return FALSE;
859 }
860 /* Check size flags */
861 if (pArena->size & ARENA_FLAG_FREE)
862 {
863 DPRINT("Heap %p: bad flags %lx for in-use arena %p\n",
864 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
865 return FALSE;
866 }
867 /* Check arena size */
868 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
869 {
870 DPRINT("Heap %p: bad size %08lx for in-use arena %p\n",
871 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena);
872 return FALSE;
873 }
874 /* Check next arena PREV_FREE flag */
875 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
876 (*(PULONG)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
877 {
878 DPRINT("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
879 subheap->heap, pArena);
880 return FALSE;
881 }
882 /* Check prev free arena */
883 if (pArena->size & ARENA_FLAG_PREV_FREE)
884 {
885 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
886 /* Check prev pointer */
887 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
888 {
889 DPRINT("Heap %p: bad back ptr %p for arena %p\n",
890 subheap->heap, pPrev, pArena );
891 return FALSE;
892 }
893 /* Check that prev arena is free */
894 if (!(pPrev->size & ARENA_FLAG_FREE) ||
895 (pPrev->magic != ARENA_FREE_MAGIC))
896 {
897 DPRINT("Heap %p: prev arena %p invalid for in-use %p\n",
898 subheap->heap, pPrev, pArena);
899 return FALSE;
900 }
901 /* Check that prev arena is really the previous block */
902 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
903 {
904 DPRINT("Heap %p: prev arena %p is not prev for in-use %p\n",
905 subheap->heap, pPrev, pArena );
906 return FALSE;
907 }
908 }
909 return TRUE;
910 }
911
912
913 /***********************************************************************
914 * HEAP_IsInsideHeap
915 * Checks whether the pointer points to a block inside a given heap.
916 *
917 * NOTES
918 * Should this return BOOL32?
919 *
920 * RETURNS
921 * !0: Success
922 * 0: Failure
923 */
924 int HEAP_IsInsideHeap(
925 HANDLE heap, /* [in] Heap */
926 ULONG flags, /* [in] Flags */
927 PVOID ptr /* [in] Pointer */
928 )
929 {
930 HEAP *heapPtr = HEAP_GetPtr( heap );
931 SUBHEAP *subheap;
932 int ret;
933
934 /* Validate the parameters */
935
936 if (!heapPtr)
937 return 0;
938 flags |= heapPtr->flags;
939 if (!(flags & HEAP_NO_SERIALIZE))
940 RtlEnterHeapLock( &heapPtr->critSection );
941 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
942 (((char *)ptr >= (char *)subheap + subheap->headerSize
943 + sizeof(ARENA_INUSE))));
944 if (!(flags & HEAP_NO_SERIALIZE))
945 RtlLeaveHeapLock( &heapPtr->critSection );
946 return ret;
947 }
948
949
950 void DumpStackFrames ( PULONG Frame, ULONG FrameCount )
951 {
952 ULONG i=0;
953
954 DbgPrint("Frames: ");
955 if ( !Frame )
956 {
957 #if defined __GNUC__
958 __asm__("mov %%ebp, %0" : "=r" (Frame) : );
959 #elif defined(_MSC_VER)
960 __asm mov [Frame], ebp
961 #endif
962 Frame = (PULONG)Frame[0]; // step out of DumpStackFrames
963 }
964 while ( Frame != 0 && (ULONG)Frame != 0xDEADBEEF && (ULONG)Frame != 0xcdcdcdcd && (ULONG)Frame != 0xcccccccc && i++ < FrameCount )
965 {
966 DbgPrint("<%p>", (PVOID)Frame[1]);
967 if (Frame[1] == 0xdeadbeef)
968 break;
969 Frame = (PULONG)Frame[0];
970 DbgPrint(" ");
971 }
972 DbgPrint("\n");
973 }
974
975 /***********************************************************************
976 * HEAP_IsRealArena [Internal]
977 * Validates a block is a valid arena.
978 *
979 * RETURNS
980 * TRUE: Success
981 * FALSE: Failure
982 */
983 static BOOLEAN HEAP_IsRealArena(
984 HANDLE heap, /* [in] Handle to the heap */
985 ULONG flags, /* [in] Bit flags that control access during operation */
986 PVOID block, /* [in] Optional pointer to memory block to validate */
987 BOOLEAN quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
988 * does not complain */
989 )
990 {
991 SUBHEAP *subheap;
992 HEAP *heapPtr = (HEAP *)(heap);
993 BOOLEAN ret = TRUE;
994
995 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
996 {
997 DPRINT("Invalid heap %08x!\n", heap );
998 return FALSE;
999 }
1000
1001 flags &= HEAP_NO_SERIALIZE;
1002 flags |= heapPtr->flags;
1003 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
1004 if (!(flags & HEAP_NO_SERIALIZE))
1005 RtlEnterHeapLock( &heapPtr->critSection );
1006
1007 if (block)
1008 {
1009 /* Only check this single memory block */
1010
1011 /* The following code is really HEAP_IsInsideHeap *
1012 * with serialization already done. */
1013 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
1014 ((char *)block < (char *)subheap + subheap->headerSize
1015 + sizeof(ARENA_INUSE)))
1016 {
1017 if (quiet == NOISY)
1018 {
1019 DPRINT("Heap %p: block %p is not inside heap\n",
1020 heap, block );
1021 DumpStackFrames(NULL,10);
1022 }
1023 else if (WARN_ON(heap))
1024 {
1025 DPRINT1("Heap %p: block %p is not inside heap\n",
1026 heap, block );
1027 DumpStackFrames(NULL,10);
1028 }
1029 ret = FALSE;
1030 }
1031 else
1032 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1033
1034 if (!(flags & HEAP_NO_SERIALIZE))
1035 RtlLeaveHeapLock( &heapPtr->critSection );
1036 return ret;
1037 }
1038
1039 subheap = &heapPtr->subheap;
1040 while (subheap && ret)
1041 {
1042 char *ptr = (char *)subheap + subheap->headerSize;
1043 while (ptr < (char *)subheap + subheap->size)
1044 {
1045 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1046 {
1047 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1048 {
1049 ret = FALSE;
1050 break;
1051 }
1052 ptr += sizeof(ARENA_FREE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1053 }
1054 else
1055 {
1056 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1057 {
1058 ret = FALSE;
1059 break;
1060 }
1061 ptr += sizeof(ARENA_INUSE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1062 }
1063 }
1064 subheap = subheap->next;
1065 }
1066
1067 if (!(flags & HEAP_NO_SERIALIZE))
1068 RtlLeaveHeapLock( &heapPtr->critSection );
1069 return ret;
1070 }
1071
1072
1073 /***********************************************************************
1074 * HeapCreate (KERNEL32.336)
1075 * RETURNS
1076 * Handle of heap: Success
1077 * NULL: Failure
1078 *
1079 * @implemented
1080 */
1081 HANDLE NTAPI
1082 RtlCreateHeap(ULONG flags,
1083 PVOID BaseAddress,
1084 SIZE_T maxSize,
1085 SIZE_T initialSize,
1086 PVOID Lock,
1087 PRTL_HEAP_PARAMETERS Parameters)
1088 {
1089 SUBHEAP *subheap;
1090 HEAP *heapPtr;
1091
1092 /* Allocate the heap block */
1093
1094 if (!maxSize)
1095 {
1096 maxSize = HEAP_DEF_SIZE;
1097 flags |= HEAP_GROWABLE;
1098 }
1099 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize,
1100 maxSize, Parameters )))
1101 {
1102 return 0;
1103 }
1104
1105 if (RtlpGetMode() == UserMode)
1106 {
1107 /* link it into the per-process heap list */
1108 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1109
1110 heapPtr = subheap->heap;
1111 heapPtr->next = (HEAP*)NtCurrentPeb()->ProcessHeaps;
1112 NtCurrentPeb()->ProcessHeaps = (HANDLE)heapPtr;
1113 NtCurrentPeb()->NumberOfHeaps++;
1114
1115 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1116 }
1117
1118 return (HANDLE)subheap;
1119 }
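
/* Example (disabled): creating and destroying a growable private heap with
 * the prototype above.  A minimal user-mode sketch; the 1 MB maximum size
 * and the lack of a caller-supplied lock or RTL_HEAP_PARAMETERS are
 * arbitrary choices for illustration.
 */
#if 0
static HANDLE CreatePrivateHeapExample(VOID)
{
    /* maxSize == 0 would also work: RtlCreateHeap then picks HEAP_DEF_SIZE
     * and forces HEAP_GROWABLE itself. */
    HANDLE hHeap = RtlCreateHeap(HEAP_GROWABLE,  /* flags                  */
                                 NULL,           /* let the heap reserve   */
                                 0x100000,       /* maximum size (1 MB)    */
                                 0x10000,        /* initial commit (64 KB) */
                                 NULL,           /* no external lock       */
                                 NULL);          /* no RTL_HEAP_PARAMETERS */
    if (hHeap)
    {
        /* ... use the heap ... */
        RtlDestroyHeap(hHeap);   /* returns NULL on success */
    }
    return hHeap;
}
#endif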
1120
1121 /***********************************************************************
1122 * HeapDestroy (KERNEL32.337)
1126 *
1127 * @implemented
1128 *
1129 * RETURNS
1130 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1131 * Failure: The Heap handle, if heap is the process heap.
1132 */
1133 HANDLE NTAPI
1134 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1135 {
1136 HEAP *heapPtr = HEAP_GetPtr( heap );
1137 SUBHEAP *subheap;
1138 ULONG flags;
1139 HEAP **pptr;
1140
1141 DPRINT("%08x\n", heap );
1142 if (!heapPtr)
1143 return heap;
1144
1145 if (RtlpGetMode() == UserMode)
1146 {
1147 if (heap == NtCurrentPeb()->ProcessHeap)
1148 return heap; /* cannot delete the main process heap */
1149
1150 /* remove it from the per-process list */
1151 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1152
1153 pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps;
1154 while (*pptr && *pptr != heapPtr) pptr = &(*pptr)->next;
1155 if (*pptr) *pptr = (*pptr)->next;
1156 NtCurrentPeb()->NumberOfHeaps--;
1157
1158 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1159 }
1160
1161 RtlDeleteHeapLock( &heapPtr->critSection );
1162 subheap = &heapPtr->subheap;
1163 // We must save the flags: the first subheap is located right after
1164 // the heap structure, so releasing the first subheap also releases
1165 // the heap structure.
1166 flags = heapPtr->flags;
1167 while (subheap)
1168 {
1169 SUBHEAP *next = subheap->next;
1170 ULONG dummySize = 0;
1171 ZwFreeVirtualMemory(NtCurrentProcess(),
1172 (PVOID*)&subheap,
1173 &dummySize,
1174 MEM_RELEASE);
1175 subheap = next;
1176 }
1177 return (HANDLE)NULL;
1178 }
1179
1180
1181 /***********************************************************************
1182 * HeapAlloc (KERNEL32.334)
1183 * RETURNS
1184 * Pointer to allocated memory block
1185 * NULL: Failure
1186 * 0x7d030f60--invalid flags in RtlHeapAllocate
1187 * @implemented
1188 */
1189 PVOID NTAPI
1190 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1191 ULONG flags, /* [in] Heap allocation control flags */
1192 ULONG size) /* [in] Number of bytes to allocate */
1193 {
1194 ARENA_FREE *pArena;
1195 ARENA_INUSE *pInUse;
1196 SUBHEAP *subheap;
1197 HEAP *heapPtr = HEAP_GetPtr( heap );
1198
1199 /* Validate the parameters */
1200
1201 if (!heapPtr)
1202 {
1203 if (flags & HEAP_GENERATE_EXCEPTIONS)
1204 RtlRaiseStatus( STATUS_NO_MEMORY );
1205 return NULL;
1206 }
1207 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1208 flags |= heapPtr->flags;
1209 if (!(flags & HEAP_NO_SERIALIZE))
1210 RtlEnterHeapLock( &heapPtr->critSection );
1211 size = (size + 7) & ~7;
1212 if (size < HEAP_MIN_BLOCK_SIZE)
1213 size = HEAP_MIN_BLOCK_SIZE;
1214
1215 /* Locate a suitable free block */
1216
1217 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1218 {
1219 DPRINT("(%08x,%08lx,%08lx): returning NULL\n",
1220 heap, flags, size );
1221 if (!(flags & HEAP_NO_SERIALIZE))
1222 RtlLeaveHeapLock( &heapPtr->critSection );
1223 if (flags & HEAP_GENERATE_EXCEPTIONS)
1224 RtlRaiseStatus( STATUS_NO_MEMORY );
1225 return NULL;
1226 }
1227
1228 /* Remove the arena from the free list */
1229
1230 pArena->next->prev = pArena->prev;
1231 pArena->prev->next = pArena->next;
1232
1233 /* Build the in-use arena */
1234
1235 pInUse = (ARENA_INUSE *)pArena;
1236 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1237 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1238 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1239 pInUse->magic = ARENA_INUSE_MAGIC;
1240
1241 /* Shrink the block */
1242
1243 HEAP_ShrinkBlock( subheap, pInUse, size );
1244
1245 if (flags & HEAP_ZERO_MEMORY)
1246 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1247 else if (TRACE_ON(heap))
1248 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1249
1250 if (!(flags & HEAP_NO_SERIALIZE))
1251 RtlLeaveHeapLock( &heapPtr->critSection );
1252
1253 DPRINT("(%08x,%08lx,%08lx): returning %p\n",
1254 heap, flags, size, (PVOID)(pInUse + 1) );
1255 return (PVOID)(pInUse + 1);
1256 }
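
/* Example (disabled): a zero-initialized allocation from the process heap.
 * A minimal sketch assuming a user-mode caller; the process heap handle is
 * taken from the PEB, as elsewhere in this file.
 */
#if 0
static BOOLEAN AllocZeroedExample(ULONG cb)
{
    HANDLE hHeap = NtCurrentPeb()->ProcessHeap;
    PVOID buffer = RtlAllocateHeap(hHeap, HEAP_ZERO_MEMORY, cb);

    if (!buffer)
        return FALSE;   /* without HEAP_GENERATE_EXCEPTIONS, failure is NULL */

    /* ... use the zero-filled buffer ... */

    return RtlFreeHeap(hHeap, 0, buffer);
}
#endif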
1257
1258
1259 /***********************************************************************
1260 * HeapFree (KERNEL32.338)
1261 * RETURNS
1262 * TRUE: Success
1263 * FALSE: Failure
1264 *
1265 * @implemented
1266 */
1267 BOOLEAN NTAPI RtlFreeHeap(
1268 HANDLE heap, /* [in] Handle of heap */
1269 ULONG flags, /* [in] Heap freeing flags */
1270 PVOID ptr /* [in] Address of memory to free */
1271 )
1272 {
1273 ARENA_INUSE *pInUse;
1274 SUBHEAP *subheap;
1275 HEAP *heapPtr = HEAP_GetPtr( heap );
1276
1277 /* Validate the parameters */
1278
1279 if (!heapPtr)
1280 return FALSE;
1281 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1282 {
1283 DPRINT("(%08x,%08lx,%p): asked to free NULL\n",
1284 heap, flags, ptr );
1285 return TRUE;
1286 }
1287
1288 flags &= HEAP_NO_SERIALIZE;
1289 flags |= heapPtr->flags;
1290 if (!(flags & HEAP_NO_SERIALIZE))
1291 RtlEnterHeapLock( &heapPtr->critSection );
1292 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1293 {
1294 if (!(flags & HEAP_NO_SERIALIZE))
1295 RtlLeaveHeapLock( &heapPtr->critSection );
1296 DPRINT("(%08x,%08lx,%p): returning FALSE\n",
1297 heap, flags, ptr );
1298 return FALSE;
1299 }
1300
1301 /* Turn the block into a free block */
1302
1303 pInUse = (ARENA_INUSE *)ptr - 1;
1304 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1305 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1306
1307 if (!(flags & HEAP_NO_SERIALIZE))
1308 RtlLeaveHeapLock( &heapPtr->critSection );
1309
1310 DPRINT("(%08x,%08lx,%p): returning TRUE\n",
1311 heap, flags, ptr );
1312 return TRUE;
1313 }
1314
1315
1316 /***********************************************************************
1317 * RtlReAllocateHeap
1318 * PARAMS
1319 * Heap [in] Handle of heap block
1320 * Flags [in] Heap reallocation flags
1321 * Ptr, [in] Address of memory to reallocate
1322 * Size [in] Number of bytes to reallocate
1323 *
1324 * RETURNS
1325 * Pointer to reallocated memory block
1326 * NULL: Failure
1327 * 0x7d030f60--invalid flags in RtlHeapAllocate
1328 * @implemented
1329 */
1330 PVOID NTAPI RtlReAllocateHeap(
1331 HANDLE Heap,
1332 ULONG Flags,
1333 PVOID Ptr,
1334 ULONG Size
1335 )
1336 {
1337 ARENA_INUSE *pArena;
1338 ULONG oldSize;
1339 HEAP *heapPtr;
1340 SUBHEAP *subheap;
1341
1342 if (!Ptr)
1343 return FALSE;
1344 if (!(heapPtr = HEAP_GetPtr( Heap )))
1345 return FALSE;
1346
1347 /* Validate the parameters */
1348
1349 Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1350 HEAP_REALLOC_IN_PLACE_ONLY;
1351 Flags |= heapPtr->flags;
1352 Size = (Size + 7) & ~7;
1353 if (Size < HEAP_MIN_BLOCK_SIZE)
1354 Size = HEAP_MIN_BLOCK_SIZE;
1355
1356 if (!(Flags & HEAP_NO_SERIALIZE))
1357 RtlEnterHeapLock( &heapPtr->critSection );
1358 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1359 {
1360 if (!(Flags & HEAP_NO_SERIALIZE))
1361 RtlLeaveHeapLock( &heapPtr->critSection );
1362 DPRINT("(%08x,%08lx,%p,%08lx): returning NULL\n",
1363 Heap, Flags, Ptr, Size );
1364 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1365 RtlRaiseStatus( STATUS_NO_MEMORY );
1366 return NULL;
1367 }
1368
1369 /* Check if we need to grow the block */
1370
1371 pArena = (ARENA_INUSE *)Ptr - 1;
1372 pArena->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1373
1374 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1375 oldSize = (pArena->size & ARENA_SIZE_MASK);
1376 if (Size > oldSize)
1377 {
1378 char *pNext = (char *)(pArena + 1) + oldSize;
1379 if ((pNext < (char *)subheap + subheap->size) &&
1380 (*(PULONG)pNext & ARENA_FLAG_FREE) &&
1381 (oldSize + (*(PULONG)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= Size))
1382 {
1383 /* The next block is free and large enough */
1384 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1385 pFree->next->prev = pFree->prev;
1386 pFree->prev->next = pFree->next;
1387 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1388 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1389 + Size + HEAP_MIN_BLOCK_SIZE,
1390 heapPtr->flags))
1391 {
1392 if (!(Flags & HEAP_NO_SERIALIZE))
1393 RtlLeaveHeapLock( &heapPtr->critSection );
1394 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1395 RtlRaiseStatus( STATUS_NO_MEMORY );
1396 return NULL;
1397 }
1398 HEAP_ShrinkBlock( subheap, pArena, Size );
1399 }
1400 else /* Do it the hard way */
1401 {
1402 ARENA_FREE *pNew;
1403 ARENA_INUSE *pInUse;
1404 SUBHEAP *newsubheap;
1405
1406 if ((Flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1407 !(pNew = HEAP_FindFreeBlock( heapPtr, Size, &newsubheap )))
1408 {
1409 if (!(Flags & HEAP_NO_SERIALIZE))
1410 RtlLeaveHeapLock( &heapPtr->critSection );
1411 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1412 RtlRaiseStatus( STATUS_NO_MEMORY );
1413 return NULL;
1414 }
1415
1416 /* Build the in-use arena */
1417
1418 pNew->next->prev = pNew->prev;
1419 pNew->prev->next = pNew->next;
1420 pInUse = (ARENA_INUSE *)pNew;
1421 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1422 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1423 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1424 pInUse->magic = ARENA_INUSE_MAGIC;
1425 HEAP_ShrinkBlock( newsubheap, pInUse, Size );
1426 memcpy( pInUse + 1, pArena + 1, oldSize );
1427
1428 /* Free the previous block */
1429
1430 HEAP_MakeInUseBlockFree( subheap, pArena, Flags );
1431 subheap = newsubheap;
1432 pArena = pInUse;
1433 }
1434 }
1435 else
1436 HEAP_ShrinkBlock( subheap, pArena, Size ); /* Shrink the block */
1437
1438 /* Clear the extra bytes if needed */
1439
1440 if (Size > oldSize)
1441 {
1442 if (Flags & HEAP_ZERO_MEMORY)
1443 memset( (char *)(pArena + 1) + oldSize, 0,
1444 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1445 else if (TRACE_ON(heap))
1446 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1447 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1448 }
1449
1450 /* Return the new arena */
1451
1452 if (!(Flags & HEAP_NO_SERIALIZE))
1453 RtlLeaveHeapLock( &heapPtr->critSection );
1454
1455 DPRINT("(%08x,%08lx,%p,%08lx): returning %p\n",
1456 Heap, Flags, Ptr, Size, (PVOID)(pArena + 1) );
1457 return (PVOID)(pArena + 1);
1458 }
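
/* Example (disabled): growing a block, letting RtlReAllocateHeap itself pick
 * between in-place growth and the copying fallback.  A minimal sketch; the
 * caller is assumed to run in user mode.
 */
#if 0
static PVOID GrowBlockExample(HANDLE hHeap, PVOID oldBlock, ULONG newSize)
{
    /* Without HEAP_REALLOC_IN_PLACE_ONLY the block may move, so the old
     * pointer must not be used once the call succeeds.  HEAP_ZERO_MEMORY
     * zero-fills only the newly added bytes. */
    PVOID newBlock = RtlReAllocateHeap(hHeap, HEAP_ZERO_MEMORY, oldBlock, newSize);

    if (!newBlock)
    {
        /* oldBlock is still valid and still owned by the caller here. */
        return oldBlock;
    }
    return newBlock;
}
#endif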
1459
1460
1461 /***********************************************************************
1462 * RtlCompactHeap
1463 *
1464 * @unimplemented
1465 */
1466 ULONG NTAPI
1467 RtlCompactHeap(HANDLE Heap,
1468 ULONG Flags)
1469 {
1470 UNIMPLEMENTED;
1471 return 0;
1472 }
1473
1474
1475 /***********************************************************************
1476 * RtlLockHeap
1477 * Attempts to acquire the critical section object for a specified heap.
1478 *
1479 * PARAMS
1480 * Heap [in] Handle of heap to lock for exclusive access
1481 *
1482 * RETURNS
1483 * TRUE: Success
1484 * FALSE: Failure
1485 *
1486 * @implemented
1487 */
1488 BOOLEAN NTAPI
1489 RtlLockHeap(IN HANDLE Heap)
1490 {
1491 HEAP *heapPtr = HEAP_GetPtr( Heap );
1492 if (!heapPtr)
1493 return FALSE;
1494 RtlEnterHeapLock( &heapPtr->critSection );
1495 return TRUE;
1496 }
1497
1498
1499 /***********************************************************************
1500 * RtlUnlockHeap
1501 * Releases ownership of the critical section object.
1502 *
1503 * PARAMS
1504 * Heap [in] Handle to the heap to unlock
1505 *
1506 * RETURNS
1507 * TRUE: Success
1508 * FALSE: Failure
1509 *
1510 * @implemented
1511 */
1512 BOOLEAN NTAPI
1513 RtlUnlockHeap(HANDLE Heap)
1514 {
1515 HEAP *heapPtr = HEAP_GetPtr( Heap );
1516 if (!heapPtr)
1517 return FALSE;
1518 RtlLeaveHeapLock( &heapPtr->critSection );
1519 return TRUE;
1520 }
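
/* Example (disabled): holding the heap lock across several calls so the
 * sequence is atomic with respect to other threads.  A minimal sketch; the
 * critical section is re-entrant, so HEAP_NO_SERIALIZE on the inner calls
 * merely skips the redundant re-acquisition of the lock already held here.
 */
#if 0
static VOID LockedSequenceExample(HANDLE hHeap)
{
    if (RtlLockHeap(hHeap))
    {
        PVOID a = RtlAllocateHeap(hHeap, HEAP_NO_SERIALIZE, 32);
        PVOID b = RtlAllocateHeap(hHeap, HEAP_NO_SERIALIZE, 64);

        if (a) RtlFreeHeap(hHeap, HEAP_NO_SERIALIZE, a);
        if (b) RtlFreeHeap(hHeap, HEAP_NO_SERIALIZE, b);

        RtlUnlockHeap(hHeap);
    }
}
#endif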
1521
1522
1523 /***********************************************************************
1524 * RtlSizeHeap
1525 * PARAMS
1526 * Heap [in] Handle of heap
1527 * Flags [in] Heap size control flags
1528 * Ptr [in] Address of memory to return size for
1529 *
1530 * RETURNS
1531 * Size in bytes of allocated memory
1532 * 0xffffffff: Failure
1533 *
1534 * @implemented
1535 */
1536 ULONG NTAPI
1537 RtlSizeHeap(
1538 HANDLE Heap,
1539 ULONG Flags,
1540 PVOID Ptr
1541 )
1542 {
1543 ULONG ret;
1544 HEAP *heapPtr = HEAP_GetPtr( Heap );
1545
1546 if (!heapPtr)
1547 return 0;
1548 Flags &= HEAP_NO_SERIALIZE;
1549 Flags |= heapPtr->flags;
1550 if (!(Flags & HEAP_NO_SERIALIZE))
1551 RtlEnterHeapLock( &heapPtr->critSection );
1552 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1553 {
1554 ret = 0xffffffff;
1555 }
1556 else
1557 {
1558 ARENA_INUSE *pArena = (ARENA_INUSE *)Ptr - 1;
1559 ret = pArena->size & ARENA_SIZE_MASK;
1560 }
1561 if (!(Flags & HEAP_NO_SERIALIZE))
1562 RtlLeaveHeapLock( &heapPtr->critSection );
1563
1564 DPRINT("(%08x,%08lx,%p): returning %08lx\n",
1565 Heap, Flags, Ptr, ret );
1566 return ret;
1567 }
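
/* Example (disabled): querying the usable size of an allocation.  A minimal
 * sketch; the value returned is the rounded-up arena size, so it can be
 * slightly larger than the size originally requested.
 */
#if 0
static ULONG QuerySizeExample(HANDLE hHeap, PVOID block)
{
    ULONG cb = RtlSizeHeap(hHeap, 0, block);

    if (cb == 0xffffffff)
        return 0;       /* not a valid block of this heap */
    return cb;
}
#endif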
1568
1569
1570 /***********************************************************************
1571 * RtlValidateHeap
1572 * Validates a specified heap.
1573 *
1574 * PARAMS
1575 * Heap [in] Handle to the heap
1576 * Flags [in] Bit flags that control access during operation
1577 * Block [in] Optional pointer to memory block to validate
1578 *
1579 * NOTES
1580 * Flags is ignored.
1581 *
1582 * RETURNS
1583 * TRUE: Success
1584 * FALSE: Failure
1585 *
1586 * @implemented
1587 */
1588 BOOLEAN NTAPI RtlValidateHeap(
1589 HANDLE Heap,
1590 ULONG Flags,
1591 PVOID Block
1592 )
1593 {
1594 HEAP *heapPtr = HEAP_GetPtr( Heap );
1595 if (!heapPtr)
1596 return FALSE;
1597 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1598 }
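
/* Example (disabled): validating a whole heap, then a single block before
 * trusting a pointer received from elsewhere.  A minimal sketch; with
 * Block == NULL every arena of every sub-heap is walked, which is slow on
 * large heaps and mainly useful for debugging.
 */
#if 0
static BOOLEAN ValidateExample(HANDLE hHeap, PVOID suspectBlock)
{
    if (!RtlValidateHeap(hHeap, 0, NULL))
        return FALSE;                                /* heap structures are corrupt */

    return RtlValidateHeap(hHeap, 0, suspectBlock);  /* check just this block       */
}
#endif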
1599
1600
1601 /***********************************************************************
1602 * HeapWalk (KERNEL32.344)
1603 * Enumerates the memory blocks in a specified heap.
1604 * See HEAP_Dump() for info on heap structure.
1605 *
1606 * TODO
1607 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1608 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1609 *
1610 * RETURNS
1611 * TRUE: Success
1612 * FALSE: Failure
1613 */
1614 #if 0
1615 BOOLEAN NTAPI HeapWalk(
1616 HANDLE heap, /* [in] Handle to heap to enumerate */
1617 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1618 )
1619 {
1620 HEAP *heapPtr = HEAP_GetPtr(heap);
1621 SUBHEAP *sub, *currentheap = NULL;
1622 BOOLEAN ret = FALSE;
1623 char *ptr;
1624 int region_index = 0;
1625
1626 if (!heapPtr || !entry)
1627 {
1628 return FALSE;
1629 }
1630
1631 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1632 RtlEnterHeapLock( &heapPtr->critSection );
1633
1634 /* set ptr to the next arena to be examined */
1635
1636 if (!entry->lpData) /* first call (init) ? */
1637 {
1638 DPRINT("begin walking of heap 0x%08x.\n", heap);
1639 /*HEAP_Dump(heapPtr);*/
1640 currentheap = &heapPtr->subheap;
1641 ptr = (char*)currentheap + currentheap->headerSize;
1642 }
1643 else
1644 {
1645 ptr = entry->lpData;
1646 sub = &heapPtr->subheap;
1647 while (sub)
1648 {
1649 if (((char *)ptr >= (char *)sub) &&
1650 ((char *)ptr < (char *)sub + sub->size))
1651 {
1652 currentheap = sub;
1653 break;
1654 }
1655 sub = sub->next;
1656 region_index++;
1657 }
1658 if (currentheap == NULL)
1659 {
1660 DPRINT("no matching subheap found, shouldn't happen !\n");
1661 goto HW_end;
1662 }
1663
1664 ptr += entry->cbData; /* point to next arena */
1665 if (ptr > (char *)currentheap + currentheap->size - 1)
1666 { /* proceed with next subheap */
1667 if (!(currentheap = currentheap->next))
1668 { /* successfully finished */
1669 DPRINT("end reached.\n");
1670 goto HW_end;
1671 }
1672 ptr = (char*)currentheap + currentheap->headerSize;
1673 }
1674 }
1675
1676 entry->wFlags = 0;
1677 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1678 {
1679 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1680
1681 /*DPRINT("free, magic: %04x\n", pArena->magic);*/
1682
1683 entry->lpData = pArena + 1;
1684 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1685 entry->cbOverhead = sizeof(ARENA_FREE);
1686 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1687 }
1688 else
1689 {
1690 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1691
1692 /*DPRINT("busy, magic: %04x\n", pArena->magic);*/
1693
1694 entry->lpData = pArena + 1;
1695 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1696 entry->cbOverhead = sizeof(ARENA_INUSE);
1697 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1698 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1699 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1700 }
1701
1702 entry->iRegionIndex = region_index;
1703
1704 /* first element of heap ? */
1705 if (ptr == (char *)(currentheap + currentheap->headerSize))
1706 {
1707 entry->wFlags |= PROCESS_HEAP_REGION;
1708 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1709 entry->Foo.Region.dwUnCommittedSize =
1710 currentheap->size - currentheap->commitSize;
1711 entry->Foo.Region.lpFirstBlock = /* first valid block */
1712 currentheap + currentheap->headerSize;
1713 entry->Foo.Region.lpLastBlock = /* first invalid block */
1714 currentheap + currentheap->size;
1715 }
1716 ret = TRUE;
1717
1718 HW_end:
1719 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1720 RtlLeaveHeapLock( &heapPtr->critSection );
1721
1722 return ret;
1723 }
1724 #endif
1725
1726
1727 VOID
1728 RtlInitializeHeapManager(VOID)
1729 {
1730 PPEB Peb;
1731
1732 Peb = NtCurrentPeb();
1733
1734 Peb->NumberOfHeaps = 0;
1735 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1736 Peb->ProcessHeaps = NULL;
1737
1738 RtlInitializeHeapLock(&RtlpProcessHeapsListLock);
1739 }
1740
1741
1742 /*
1743 * @implemented
1744 */
1745 NTSTATUS NTAPI
1746 RtlEnumProcessHeaps(PHEAP_ENUMERATION_ROUTINE HeapEnumerationRoutine,
1747 PVOID lParam)
1748 {
1749 NTSTATUS Status = STATUS_SUCCESS;
1750 HEAP** pptr;
1751
1752 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1753
1754 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1755 {
1756 Status = HeapEnumerationRoutine(*pptr,lParam);
1757 if (!NT_SUCCESS(Status))
1758 break;
1759 }
1760
1761 RtlLeaveHeapLock(&RtlpProcessHeapsListLock);
1762
1763 return Status;
1764 }
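
/* Example (disabled): counting the process heaps through the enumeration
 * callback.  A minimal sketch assuming the callback shape implied by the
 * call above: it receives the heap handle and the lParam cookie and returns
 * an NTSTATUS, where a failure status stops the enumeration.
 */
#if 0
static NTSTATUS NTAPI CountHeapRoutine(PVOID HeapHandle, PVOID lParam)
{
    (*(PULONG)lParam)++;        /* lParam points at the caller's counter */
    return STATUS_SUCCESS;      /* keep enumerating */
}

static ULONG CountProcessHeapsExample(VOID)
{
    ULONG count = 0;
    RtlEnumProcessHeaps(CountHeapRoutine, &count);
    return count;
}
#endif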
1765
1766
1767 /*
1768 * @implemented
1769 */
1770 ULONG NTAPI
1771 RtlGetProcessHeaps(ULONG HeapCount,
1772 HANDLE *HeapArray)
1773 {
1774 ULONG Result = 0;
1775 HEAP ** pptr;
1776
1777 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1778
1779 Result = NtCurrentPeb()->NumberOfHeaps;
1780
1781 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1782 {
1783 int i = 0;
1784 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1785 {
1786 HeapArray[i++] = *pptr;
1787 }
1788 }
1789
1790 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1791
1792 return Result;
1793 }
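
/* Example (disabled): snapshotting the process heap handles into a local
 * array.  A minimal sketch; the handles are copied only when the caller's
 * array is large enough, but the total number of heaps is always returned,
 * so the return value doubles as the required array size.
 */
#if 0
static VOID ListProcessHeapsExample(VOID)
{
    HANDLE heaps[16];
    ULONG total = RtlGetProcessHeaps(16, heaps);
    ULONG i;

    for (i = 0; i < total && i < 16; i++)
    {
        DPRINT("process heap %lu: %p\n", i, heaps[i]);
    }
}
#endif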
1794
1795
1796 /*
1797 * @implemented
1798 */
1799 BOOLEAN NTAPI
1800 RtlValidateProcessHeaps(VOID)
1801 {
1802 BOOLEAN Result = TRUE;
1803 HEAP ** pptr;
1804
1805 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1806
1807 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1808 {
1809 if (!RtlValidateHeap(*pptr, 0, NULL))
1810 {
1811 Result = FALSE;
1812 break;
1813 }
1814 }
1815
1816 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1817
1818 return Result;
1819 }
1820
1821
1822 /*
1823 * @unimplemented
1824 */
1825 BOOLEAN NTAPI
1826 RtlZeroHeap(
1827 IN PVOID HeapHandle,
1828 IN ULONG Flags
1829 )
1830 {
1831 UNIMPLEMENTED;
1832 return FALSE;
1833 }
1834
1835 /* EOF */