ReactOS PowerPC WIP. This will be edited and merged, and likely shrunk.
1 /* COPYRIGHT: See COPYING in the top level directory
2 * PROJECT: ReactOS system libraries
3 * FILE: lib/rtl/heap.c
4 * PURPOSE: Heap management functions
5 * PROGRAMMERS: Copyright 1996 Alexandre Julliard
6 * Copyright 1998 Ulrich Weigand
7 */
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
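/* Layout summary: a heap handle points at a HEAP structure whose first member
 * is the first SUBHEAP; additional SUBHEAPs are chained through 'next'. Each
 * block inside a sub-heap is preceded by an ARENA_INUSE or ARENA_FREE header,
 * and free blocks are linked into one of the HEAP_NB_FREE_LISTS doubly-linked
 * free lists kept in the HEAP structure.
 */
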
15 /* INCLUDES *****************************************************************/
16
17 #include <rtl.h>
18
19 #define NDEBUG
20 #include <debug.h>
21
22 /* FUNCTIONS *****************************************************************/
23
24 #define WARN_ON(x) (1)
25
26 #ifdef NDEBUG
27 #define TRACE_ON(x) (0)
28 #else
29 #define TRACE_ON(x) (1)
30 #endif
31
32 static RTL_CRITICAL_SECTION RtlpProcessHeapsListLock;
33
34
35 typedef struct tagARENA_INUSE
36 {
37 ULONG size; /* Block size; must be the first field */
38 USHORT threadId; /* Allocating thread id */
39 USHORT magic; /* Magic number */
40 }
41 ARENA_INUSE;
42
43 typedef struct tagARENA_FREE
44 {
45 ULONG size; /* Block size; must be the first field */
46 USHORT threadId; /* Freeing thread id */
47 USHORT magic; /* Magic number */
48 struct tagARENA_FREE *next; /* Next free arena */
49 struct tagARENA_FREE *prev; /* Prev free arena */
50 }
51 ARENA_FREE;
52
53 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
54 #define ARENA_FLAG_PREV_FREE 0x00000002
55 #define ARENA_SIZE_MASK 0xfffffffc
56 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
57 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
58
59 #define ARENA_INUSE_FILLER 0x55
60 #define ARENA_FREE_FILLER 0xaa
61
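/* The low bits of the 'size' field carry the arena flags; an illustrative
 * sketch (not part of the original code) of how a header is decoded:
 *
 *   ULONG   blockSize = pArena->size & ARENA_SIZE_MASK;           // data bytes
 *   BOOLEAN isFree    = (pArena->size & ARENA_FLAG_FREE) != 0;
 *   BOOLEAN prevFree  = (pArena->size & ARENA_FLAG_PREV_FREE) != 0;
 *
 * This works because block sizes are rounded to a multiple of 8 (see
 * RtlAllocateHeap), so the two low bits are always free for flags. The user
 * pointer handed out by RtlAllocateHeap is (PVOID)(pInUse + 1), i.e. the
 * arena header immediately precedes the user data.
 */
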
62 #define QUIET 1 /* Suppress messages */
63 #define NOISY 0 /* Report all errors */
64
65 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
66
67 /* Max size of the blocks on the free lists */
68 static const ULONG HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
69 {
70 0x20, 0x80, 0x200, 0xffffffff
71 };
72
73 typedef struct
74 {
75 ULONG size;
76 ARENA_FREE arena;
77 }
78 FREE_LIST_ENTRY;
79
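/* A block is assigned to a free list by picking the first entry whose limit
 * in HEAP_freeListSizes is large enough; an illustrative sketch (this is the
 * same scan performed by HEAP_InsertFreeBlock and HEAP_FindFreeBlock below):
 *
 *   FREE_LIST_ENTRY *pEntry = heap->freeList;
 *   while (pEntry->size < size)
 *       pEntry++;        // buckets: 0x20, 0x80, 0x200, then the catch-all
 *
 * The last list has a limit of 0xffffffff, so the scan always terminates.
 */
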
80 struct tagHEAP;
81
82 typedef struct tagSUBHEAP
83 {
84 ULONG size; /* Size of the whole sub-heap */
85 ULONG commitSize; /* Committed size of the sub-heap */
86 ULONG headerSize; /* Size of the heap header */
87 struct tagSUBHEAP *next; /* Next sub-heap */
88 struct tagHEAP *heap; /* Main heap structure */
89 ULONG magic; /* Magic number */
90 ULONG UserFlags;
91 PVOID UserValue;
92 }
93 SUBHEAP, *PSUBHEAP;
94
95 #define SUBHEAP_MAGIC ((ULONG)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
96
97 typedef struct tagHEAP
98 {
99 SUBHEAP subheap; /* First sub-heap */
100 struct tagHEAP *next; /* Next heap for this process */
101 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
102 RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
103 ULONG flags; /* Heap flags */
104 ULONG magic; /* Magic number */
105 PRTL_HEAP_COMMIT_ROUTINE commitRoutine;
106 }
107 HEAP, *PHEAP;
108
109 #define HEAP_MAGIC ((ULONG)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
110
111 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
112 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
113 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
114
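/* Commit granularity example (illustrative): HEAP_Commit rounds the committed
 * size up to the next 64Kb boundary,
 *
 *   size = (size + COMMIT_MASK) & ~COMMIT_MASK;
 *
 * so e.g. a sub-heap that needs 0x12345 bytes committed ends up with 0x20000.
 */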
115
116 static BOOLEAN HEAP_IsRealArena( HANDLE heap, ULONG flags, PVOID block, BOOLEAN quiet );
117
118
119 /***********************************************************************
120 * HEAP_Dump
121 */
122 VOID
123 HEAP_Dump(PHEAP heap)
124 {
125 int i;
126 SUBHEAP *subheap;
127 char *ptr;
128
129 DPRINT( "Heap: %p\n", heap );
130 DPRINT( "Next: %p Sub-heaps: %p",
131 heap->next, &heap->subheap );
132 subheap = &heap->subheap;
133 while (subheap->next)
134 {
135 DPRINT( " -> %p", subheap->next );
136 subheap = subheap->next;
137 }
138
139 DPRINT( "\nFree lists:\n Block Stat Size Id\n" );
140 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
141 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
142 &heap->freeList[i].arena,
143 heap->freeList[i].arena.size,
144 heap->freeList[i].arena.threadId,
145 heap->freeList[i].arena.prev,
146 heap->freeList[i].arena.next );
147
148 subheap = &heap->subheap;
149 while (subheap)
150 {
151 ULONG freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
152 DPRINT( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
153 subheap, subheap->size, subheap->commitSize );
154
155 DPRINT( "\n Block Stat Size Id\n" );
156 ptr = (char*)subheap + subheap->headerSize;
157 while (ptr < (char *)subheap + subheap->size)
158 {
159 if (*(PULONG)ptr & ARENA_FLAG_FREE)
160 {
161 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
162 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
163 pArena, pArena->size & ARENA_SIZE_MASK,
164 pArena->threadId, pArena->prev,
165 pArena->next);
166 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
167 arenaSize += sizeof(ARENA_FREE);
168 freeSize += pArena->size & ARENA_SIZE_MASK;
169 }
170 else if (*(PULONG)ptr & ARENA_FLAG_PREV_FREE)
171 {
172 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
173 DPRINT( "%p Used %08lx %04x back=%08lx\n",
174 pArena, pArena->size & ARENA_SIZE_MASK,
175 pArena->threadId, *((PULONG)pArena - 1));
176 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
177 arenaSize += sizeof(ARENA_INUSE);
178 usedSize += pArena->size & ARENA_SIZE_MASK;
179 }
180 else
181 {
182 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
183 DPRINT( "%p used %08lx %04x\n",
184 pArena, pArena->size & ARENA_SIZE_MASK,
185 pArena->threadId);
186 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
187 arenaSize += sizeof(ARENA_INUSE);
188 usedSize += pArena->size & ARENA_SIZE_MASK;
189 }
190 }
191 DPRINT( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
192 subheap->size, subheap->commitSize, freeSize, usedSize,
193 arenaSize, (arenaSize * 100) / subheap->size );
194 subheap = subheap->next;
195 }
196 }
197
198
199 /***********************************************************************
200 * HEAP_GetPtr
201 * RETURNS
202 * Pointer to the heap
203 * NULL: Failure
204 */
205 static PHEAP
206 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
207 {
208 HEAP *heapPtr = (HEAP *)heap;
209 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
210 {
211 DPRINT("Invalid heap %p!\n", heap );
212 return NULL;
213 }
214 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
215 {
216 HEAP_Dump( heapPtr );
217 ASSERT( FALSE );
218 return NULL;
219 }
220 return heapPtr;
221 }
222
223
224 /***********************************************************************
225 * HEAP_InsertFreeBlock
226 *
227 * Insert a free block into the free list.
228 */
229 static VOID
230 HEAP_InsertFreeBlock(PHEAP heap,
231 ARENA_FREE *pArena,
232 BOOLEAN last)
233 {
234 FREE_LIST_ENTRY *pEntry = heap->freeList;
235 while (pEntry->size < pArena->size)
236 pEntry++;
237 if (last)
238 {
239 /* insert at end of free list, i.e. before next free list entry */
240 pEntry++;
241 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
242 {
243 pEntry = heap->freeList;
244 }
245 pArena->prev = pEntry->arena.prev;
246 pArena->prev->next = pArena;
247 pArena->next = &pEntry->arena;
248 pEntry->arena.prev = pArena;
249 }
250 else
251 {
252 /* insert at head of free list */
253 pArena->next = pEntry->arena.next;
254 pArena->next->prev = pArena;
255 pArena->prev = &pEntry->arena;
256 pEntry->arena.next = pArena;
257 }
258 pArena->size |= ARENA_FLAG_FREE;
259 }
260
261
262 /***********************************************************************
263 * HEAP_FindSubHeap
264 * Find the sub-heap containing a given address.
265 *
266 * RETURNS
267 * Pointer: Success
268 * NULL: Failure
269 */
270 static PSUBHEAP
271 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
272 PVOID ptr) /* [in] Address */
273 {
274 PSUBHEAP sub = &heap->subheap;
275 while (sub)
276 {
277 if (((char *)ptr >= (char *)sub) &&
278 ((char *)ptr < (char *)sub + sub->size))
279 return sub;
280 sub = sub->next;
281 }
282 return NULL;
283 }
284
285
286 /***********************************************************************
287 * HEAP_Commit
288 *
289 * Make sure the heap storage is committed up to (not including) ptr.
290 */
291 static __inline BOOLEAN
292 HEAP_Commit(SUBHEAP *subheap,
293 PVOID ptr,
294 ULONG flags)
295 {
296 SIZE_T size = (SIZE_T)((char *)ptr - (char *)subheap);
297 NTSTATUS Status;
298 PVOID address;
299 SIZE_T commitsize;
300
301 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
302 if (size > subheap->size)
303 size = subheap->size;
304 if (size <= subheap->commitSize)
305 return TRUE;
306
307 address = (PVOID)((char *)subheap + subheap->commitSize);
308 commitsize = size - subheap->commitSize;
309
310 if (subheap->heap->commitRoutine != NULL)
311 {
312 Status = subheap->heap->commitRoutine(subheap->heap, &address, &commitsize);
313 }
314 else
315 {
316 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
317 &address,
318 0,
319 &commitsize,
320 MEM_COMMIT,
321 PAGE_EXECUTE_READWRITE);
322 }
323 if (!NT_SUCCESS(Status))
324 {
325 DPRINT( "Could not commit %08lx bytes at %p for heap %p\n",
326 size - subheap->commitSize,
327 ((char *)subheap + subheap->commitSize),
328 subheap->heap );
329 return FALSE;
330 }
331
332 subheap->commitSize += commitsize;
333 return TRUE;
334 }
335
336
337 #if 0
338 /***********************************************************************
339 * HEAP_Decommit
340 *
341 * If possible, decommit the heap storage from (including) 'ptr'.
342 */
343 static inline BOOLEAN HEAP_Decommit( SUBHEAP *subheap, PVOID ptr, ULONG flags )
344 {
345 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
346 PVOID address;
347 ULONG decommitsize;
348 NTSTATUS Status;
349 /* round to next block and add one full block */
350 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
351 if (size >= subheap->commitSize)
352 return TRUE;
353
354 address = (PVOID)((char *)subheap + size);
355 decommitsize = subheap->commitSize - size;
356
357 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
358 &address,
359 &decommitsize,
360 MEM_DECOMMIT);
361 if (!NT_SUCCESS(Status))
362 {
363 DPRINT( "Could not decommit %08lx bytes at %p for heap %p\n",
364 subheap->commitSize - size,
365 ((char *)subheap + size),
366 subheap->heap );
367 return FALSE;
368 }
369
370 subheap->commitSize -= decommitsize;
371 return TRUE;
372 }
373 #endif
374
375 /***********************************************************************
376 * HEAP_CreateFreeBlock
377 *
378 * Create a free block at a specified address. 'size' is the size of the
379 * whole block, including the new arena.
380 */
381 static VOID HEAP_CreateFreeBlock( SUBHEAP *subheap, PVOID ptr, ULONG size )
382 {
383 ARENA_FREE *pFree;
384 BOOLEAN last;
385
386 /* Create a free arena */
387
388 pFree = (ARENA_FREE *)ptr;
389 pFree->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
390 pFree->magic = ARENA_FREE_MAGIC;
391
392 /* If debugging, erase the freed block content */
393
394 if (TRACE_ON(heap))
395 {
396 char *pEnd = (char *)ptr + size;
397 if (pEnd > (char *)subheap + subheap->commitSize)
398 pEnd = (char *)subheap + subheap->commitSize;
399 if (pEnd > (char *)(pFree + 1))
400 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
401 }
402
403 /* Check if the next block is also free */
404
405 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
406 (*(PULONG)((char *)ptr + size) & ARENA_FLAG_FREE))
407 {
408 /* Remove the next arena from the free list */
409 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
410 pNext->next->prev = pNext->prev;
411 pNext->prev->next = pNext->next;
412 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
413 if (TRACE_ON(heap))
414 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
415 }
416
417 /* Set the next block PREV_FREE flag and pointer */
418
419 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
420 if (!last)
421 {
422 PULONG pNext = (PULONG)((char *)ptr + size);
423 *pNext |= ARENA_FLAG_PREV_FREE;
424 *(ARENA_FREE **)(pNext - 1) = pFree;
425 }
426
427 /* Last, insert the new block into the free list */
428
429 pFree->size = size - sizeof(*pFree);
430 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
431 }
432
433
434 /***********************************************************************
435 * HEAP_MakeInUseBlockFree
436 *
437 * Turn an in-use block into a free block. Can also decommit the end of
438 * the heap, and possibly even free the sub-heap altogether.
439 */
440 static VOID HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
441 ULONG flags)
442 {
443 ARENA_FREE *pFree;
444 ULONG size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
445 ULONG dummySize = 0;
446
447 /* Check if we can merge with previous block */
448
449 if (pArena->size & ARENA_FLAG_PREV_FREE)
450 {
451 pFree = *((ARENA_FREE **)pArena - 1);
452 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
453 /* Remove it from the free list */
454 pFree->next->prev = pFree->prev;
455 pFree->prev->next = pFree->next;
456 }
457 else
458 pFree = (ARENA_FREE *)pArena;
459
460 /* Create a free block */
461
462 HEAP_CreateFreeBlock( subheap, pFree, size );
463 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
464 if ((char *)pFree + size < (char *)subheap + subheap->size)
465 return; /* Not the last block, so nothing more to do */
466
467 /* Free the whole sub-heap if it's empty and not the original one */
468
469 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
470 (subheap != &subheap->heap->subheap))
471 {
472 SUBHEAP *pPrev = &subheap->heap->subheap;
473 /* Remove the free block from the list */
474 pFree->next->prev = pFree->prev;
475 pFree->prev->next = pFree->next;
476 /* Remove the subheap from the list */
477 while (pPrev && (pPrev->next != subheap))
478 pPrev = pPrev->next;
479 if (pPrev)
480 pPrev->next = subheap->next;
481 /* Free the memory */
482 subheap->magic = 0;
483 ZwFreeVirtualMemory(NtCurrentProcess(),
484 (PVOID*)&subheap,
485 &dummySize,
486 MEM_RELEASE);
487 return;
488 }
489
490 /* Decommit the end of the heap */
491 }
492
493
494 /***********************************************************************
495 * HEAP_ShrinkBlock
496 *
497 * Shrink an in-use block.
498 */
499 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, ULONG size)
500 {
501 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
502 {
503 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
504 (pArena->size & ARENA_SIZE_MASK) - size );
505 /* assign size plus previous arena flags */
506 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
507 }
508 else
509 {
510 /* Turn off PREV_FREE flag in next block */
511 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
512 if (pNext < (char *)subheap + subheap->size)
513 *(PULONG)pNext &= ~ARENA_FLAG_PREV_FREE;
514 }
515 }
516
517 /***********************************************************************
518 * HEAP_InitSubHeap
519 */
520 static BOOLEAN HEAP_InitSubHeap( HEAP *heap, PVOID address, ULONG flags,
521 SIZE_T commitSize, ULONG totalSize,
522 PRTL_HEAP_PARAMETERS Parameters )
523 {
524 SUBHEAP *subheap = (SUBHEAP *)address;
525 FREE_LIST_ENTRY *pEntry;
526 int i;
527 NTSTATUS Status;
528
529 /* Fill the sub-heap structure */
530
531 subheap = (PSUBHEAP)address;
532 subheap->heap = heap;
533 subheap->size = totalSize;
534 subheap->commitSize = commitSize;
535 subheap->magic = SUBHEAP_MAGIC;
536
537 if ( subheap != (SUBHEAP *)heap )
538 {
539 /* If this is a secondary subheap, insert it into list */
540
541 subheap->headerSize = sizeof(SUBHEAP);
542 subheap->next = heap->subheap.next;
543 heap->subheap.next = subheap;
544 }
545 else
546 {
547 /* If this is a primary subheap, initialize main heap */
548
549 subheap->headerSize = sizeof(HEAP);
550 subheap->next = NULL;
551 heap->next = NULL;
552 heap->flags = flags;
553 heap->magic = HEAP_MAGIC;
554 if (Parameters)
555 heap->commitRoutine = Parameters->CommitRoutine;
556 else
557 heap->commitRoutine = NULL;
558
559 /* Build the free lists */
560
561 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
562 {
563 pEntry->size = HEAP_freeListSizes[i];
564 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
565 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
566 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
567 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
568 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
569 pEntry->arena.threadId = 0;
570 pEntry->arena.magic = ARENA_FREE_MAGIC;
571 }
572
573 /* Initialize critical section */
574
575 if (RtlpGetMode() == UserMode)
576 {
577 RtlInitializeHeapLock( &heap->critSection );
578 }
579 }
580
581 /* Commit memory */
582 if (heap->commitRoutine)
583 {
584 if (subheap != (SUBHEAP *)heap)
585 {
586 Status = heap->commitRoutine(heap, &address, &commitSize);
587 }
588 else
589 {
590 /* the caller is responsible for committing the first page! */
591 Status = STATUS_SUCCESS;
592 }
593 }
594 else
595 {
596 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
597 &address,
598 0,
599 &commitSize,
600 MEM_COMMIT,
601 PAGE_EXECUTE_READWRITE);
602 }
603 if (!NT_SUCCESS(Status))
604 {
605 DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
606 commitSize, address);
607 return FALSE;
608 }
609
610 /* Create the first free block */
611
612 HEAP_CreateFreeBlock( subheap, (PUCHAR)subheap + subheap->headerSize,
613 subheap->size - subheap->headerSize );
614
615 return TRUE;
616 }
617
618 /***********************************************************************
619 * HEAP_CreateSubHeap
620 *
621 * Create a sub-heap of the given size.
622 * If heap == NULL, creates a main heap.
623 */
624 static PSUBHEAP
625 HEAP_CreateSubHeap(PVOID BaseAddress,
626 HEAP *heap, ULONG flags,
627 SIZE_T commitSize, ULONG totalSize,
628 PRTL_HEAP_PARAMETERS Parameters )
629 {
630 PVOID address;
631 NTSTATUS Status;
632
633 /* Round up sizes to a 64K boundary */
634
635 totalSize = (totalSize + 0xffff) & 0xffff0000;
636 commitSize = (commitSize + 0xffff) & 0xffff0000;
637 if (!commitSize)
638 commitSize = 0x10000;
639 if (totalSize < commitSize)
640 totalSize = commitSize;
641
642 /* Allocate the memory block */
643 address = BaseAddress;
644 if (!address)
645 {
646 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
647 &address,
648 0,
649 (PULONG)&totalSize,
650 MEM_RESERVE | MEM_COMMIT,
651 PAGE_EXECUTE_READWRITE);
652 if (!NT_SUCCESS(Status))
653 {
654 DPRINT("Could not VirtualAlloc %08lx bytes\n",
655 totalSize );
656 return NULL;
657 }
658 }
659
660 /* Initialize subheap */
661
662 if (!HEAP_InitSubHeap( heap ? heap : (HEAP *)address,
663 address, flags, commitSize, totalSize,
664 Parameters ))
665 {
666 if (!BaseAddress)
667 {
668 ULONG dummySize = 0;
669 ZwFreeVirtualMemory(NtCurrentProcess(),
670 &address,
671 &dummySize,
672 MEM_RELEASE);
673 }
674 return NULL;
675 }
676
677 return (SUBHEAP *)address;
678 }
679
680
681 /***********************************************************************
682 * HEAP_FindFreeBlock
683 *
684 * Find a free block at least as large as the requested size, and make sure
685 * the requested size is committed.
686 */
687 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, SIZE_T size,
688 SUBHEAP **ppSubHeap )
689 {
690 SUBHEAP *subheap;
691 ARENA_FREE *pArena;
692 FREE_LIST_ENTRY *pEntry = heap->freeList;
693
694 /* Find a suitable free list, and in it find a block large enough */
695
696 while (pEntry->size < size)
697 pEntry++;
698 pArena = pEntry->arena.next;
699 while (pArena != &heap->freeList[0].arena)
700 {
701 ULONG arena_size = (pArena->size & ARENA_SIZE_MASK) +
702 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
703 if (arena_size >= size)
704 {
705 subheap = HEAP_FindSubHeap( heap, pArena );
706 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
707 + size + HEAP_MIN_BLOCK_SIZE,
708 heap->flags))
709 return NULL;
710 *ppSubHeap = subheap;
711 return pArena;
712 }
713
714 pArena = pArena->next;
715 }
716
717 /* If no block was found, attempt to grow the heap */
718
719 if (!(heap->flags & HEAP_GROWABLE))
720 {
721 DPRINT("Not enough space in heap %p for %08lx bytes\n",
722 heap, size );
723 return NULL;
724 }
725 /* Make sure we commit enough space to fit one more trailing free arena:
726 * one sub-heap header, one first free arena (which will eventually
727 * become in-use), and HEAP_MIN_BLOCK_SIZE for the second free arena
728 * that may be assigned all the remaining free space in
729 * HEAP_ShrinkBlock(). */
730 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
731 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
732 max( HEAP_DEF_SIZE, size ), NULL )))
733 return NULL;
734
735 DPRINT("created new sub-heap %p of %08lx bytes for heap %p\n",
736 subheap, size, heap );
737
738 *ppSubHeap = subheap;
739 return (ARENA_FREE *)(subheap + 1);
740 }
741
742
743 /***********************************************************************
744 * HEAP_IsValidArenaPtr
745 *
746 * Check that the pointer is inside the range possible for arenas.
747 */
748 static BOOLEAN HEAP_IsValidArenaPtr( HEAP *heap, PVOID ptr )
749 {
750 int i;
751 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
752 if (!subheap)
753 return FALSE;
754 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
755 return TRUE;
756 if (subheap != &heap->subheap)
757 return FALSE;
758 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
759 if (ptr == (PVOID)&heap->freeList[i].arena)
760 return TRUE;
761 return FALSE;
762 }
763
764
765 /***********************************************************************
766 * HEAP_ValidateFreeArena
767 */
768 static BOOLEAN HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
769 {
770 char *heapEnd = (char *)subheap + subheap->size;
771
772 /* Check magic number */
773 if (pArena->magic != ARENA_FREE_MAGIC)
774 {
775 DPRINT("Heap %p: invalid free arena magic for %p\n",
776 subheap->heap, pArena);
777 return FALSE;
778 }
779 /* Check size flags */
780 if (!(pArena->size & ARENA_FLAG_FREE) ||
781 (pArena->size & ARENA_FLAG_PREV_FREE))
782 {
783 DPRINT("Heap %p: bad flags %lx for free arena %p\n",
784 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
return FALSE;
785 }
786 /* Check arena size */
787 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
788 {
789 DPRINT("Heap %p: bad size %08lx for free arena %p\n",
790 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena );
791 return FALSE;
792 }
793 /* Check that next pointer is valid */
794 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
795 {
796 DPRINT("Heap %p: bad next ptr %p for arena %p\n",
797 subheap->heap, pArena->next, pArena);
798 return FALSE;
799 }
800 /* Check that next arena is free */
801 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
802 (pArena->next->magic != ARENA_FREE_MAGIC))
803 {
804 DPRINT("Heap %p: next arena %p invalid for %p\n",
805 subheap->heap, pArena->next, pArena);
806 return FALSE;
807 }
808 /* Check that prev pointer is valid */
809 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
810 {
811 DPRINT("Heap %p: bad prev ptr %p for arena %p\n",
812 subheap->heap, pArena->prev, pArena);
813 return FALSE;
814 }
815 /* Check that prev arena is free */
816 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
817 (pArena->prev->magic != ARENA_FREE_MAGIC))
818 {
819 DPRINT("Heap %p: prev arena %p invalid for %p\n",
820 subheap->heap, pArena->prev, pArena);
821 return FALSE;
822 }
823 /* Check that next block has PREV_FREE flag */
824 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
825 {
826 if (!(*(PULONG)((char *)(pArena + 1) +
827 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
828 {
829 DPRINT("Heap %p: free arena %p next block has no PREV_FREE flag\n",
830 subheap->heap, pArena);
831 return FALSE;
832 }
833 /* Check next block back pointer */
834 if (*((ARENA_FREE **)((char *)(pArena + 1) +
835 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
836 {
837 DPRINT("Heap %p: arena %p has wrong back ptr %lx\n",
838 subheap->heap, pArena,
839 *((PULONG)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
840 return FALSE;
841 }
842 }
843 return TRUE;
844 }
845
846
847 /***********************************************************************
848 * HEAP_ValidateInUseArena
849 */
850 static BOOLEAN HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOLEAN quiet )
851 {
852 char *heapEnd = (char *)subheap + subheap->size;
853
854 /* Check magic number */
855 if (pArena->magic != ARENA_INUSE_MAGIC)
856 {
857 if (quiet == NOISY)
858 {
859 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
860 subheap->heap, pArena);
861 if (TRACE_ON(heap))
862 HEAP_Dump( subheap->heap );
863 }
864 else if (WARN_ON(heap))
865 {
866 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
867 subheap->heap, pArena);
868 if (TRACE_ON(heap))
869 HEAP_Dump( subheap->heap );
870 }
871 return FALSE;
872 }
873 /* Check size flags */
874 if (pArena->size & ARENA_FLAG_FREE)
875 {
876 DPRINT("Heap %p: bad flags %lx for in-use arena %p\n",
877 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
878 return FALSE;
879 }
880 /* Check arena size */
881 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
882 {
883 DPRINT("Heap %p: bad size %08lx for in-use arena %p\n",
884 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena);
885 return FALSE;
886 }
887 /* Check next arena PREV_FREE flag */
888 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
889 (*(PULONG)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
890 {
891 DPRINT("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
892 subheap->heap, pArena);
893 return FALSE;
894 }
895 /* Check prev free arena */
896 if (pArena->size & ARENA_FLAG_PREV_FREE)
897 {
898 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
899 /* Check prev pointer */
900 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
901 {
902 DPRINT("Heap %p: bad back ptr %p for arena %p\n",
903 subheap->heap, pPrev, pArena );
904 return FALSE;
905 }
906 /* Check that prev arena is free */
907 if (!(pPrev->size & ARENA_FLAG_FREE) ||
908 (pPrev->magic != ARENA_FREE_MAGIC))
909 {
910 DPRINT("Heap %p: prev arena %p invalid for in-use %p\n",
911 subheap->heap, pPrev, pArena);
912 return FALSE;
913 }
914 /* Check that prev arena is really the previous block */
915 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
916 {
917 DPRINT("Heap %p: prev arena %p is not prev for in-use %p\n",
918 subheap->heap, pPrev, pArena );
919 return FALSE;
920 }
921 }
922 return TRUE;
923 }
924
925
926 /***********************************************************************
927 * HEAP_IsInsideHeap
928 * Checks whether the pointer points to a block inside a given heap.
929 *
930 * NOTES
931 * Should this return BOOL32?
932 *
933 * RETURNS
934 * !0: Success
935 * 0: Failure
936 */
937 int HEAP_IsInsideHeap(
938 HANDLE heap, /* [in] Heap */
939 ULONG flags, /* [in] Flags */
940 PVOID ptr /* [in] Pointer */
941 )
942 {
943 HEAP *heapPtr = HEAP_GetPtr( heap );
944 SUBHEAP *subheap;
945 int ret;
946
947 /* Validate the parameters */
948
949 if (!heapPtr)
950 return 0;
951 flags |= heapPtr->flags;
952 if (!(flags & HEAP_NO_SERIALIZE))
953 RtlEnterHeapLock( &heapPtr->critSection );
954 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
955 (((char *)ptr >= (char *)subheap + subheap->headerSize
956 + sizeof(ARENA_INUSE))));
957 if (!(flags & HEAP_NO_SERIALIZE))
958 RtlLeaveHeapLock( &heapPtr->critSection );
959 return ret;
960 }
961
962
963 void DumpStackFrames ( PULONG Frame, ULONG FrameCount )
964 {
965 #ifdef _M_IX86
966 ULONG i=0;
967
968 DbgPrint("Frames: ");
969 if ( !Frame )
970 {
971 #if defined __GNUC__
972 __asm__("mov %%ebp, %0" : "=r" (Frame) : );
973 #elif defined(_MSC_VER)
974 __asm mov [Frame], ebp
975 #endif
976 Frame = (PULONG)Frame[0]; // step out of DumpStackFrames
977 }
978 while ( Frame != 0 && (ULONG)Frame != 0xDEADBEEF && (ULONG)Frame != 0xcdcdcdcd && (ULONG)Frame != 0xcccccccc && i++ < FrameCount )
979 {
980 DbgPrint("<%p>", (PVOID)Frame[1]);
981 if (Frame[1] == 0xdeadbeef)
982 break;
983 Frame = (PULONG)Frame[0];
984 DbgPrint(" ");
985 }
986 DbgPrint("\n");
987 #endif
988 }
989
990 /***********************************************************************
991 * HEAP_IsRealArena [Internal]
992 * Validates a block is a valid arena.
993 *
994 * RETURNS
995 * TRUE: Success
996 * FALSE: Failure
997 */
998 static BOOLEAN HEAP_IsRealArena(
999 HANDLE heap, /* [in] Handle to the heap */
1000 ULONG flags, /* [in] Bit flags that control access during operation */
1001 PVOID block, /* [in] Optional pointer to memory block to validate */
1002 BOOLEAN quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
1003 * does not complain */
1004 )
1005 {
1006 SUBHEAP *subheap;
1007 HEAP *heapPtr = (HEAP *)(heap);
1008 BOOLEAN ret = TRUE;
1009
1010 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
1011 {
1012 DPRINT("Invalid heap %p!\n", heap );
1013 return FALSE;
1014 }
1015
1016 flags &= HEAP_NO_SERIALIZE;
1017 flags |= heapPtr->flags;
1018 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
1019 if (!(flags & HEAP_NO_SERIALIZE))
1020 RtlEnterHeapLock( &heapPtr->critSection );
1021
1022 if (block)
1023 {
1024 /* Only check this single memory block */
1025
1026 /* The following code is really HEAP_IsInsideHeap *
1027 * with serialization already done. */
1028 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
1029 ((char *)block < (char *)subheap + subheap->headerSize
1030 + sizeof(ARENA_INUSE)))
1031 {
1032 if (quiet == NOISY)
1033 {
1034 DPRINT("Heap %p: block %p is not inside heap\n",
1035 heap, block );
1036 DumpStackFrames(NULL,10);
1037 }
1038 else if (WARN_ON(heap))
1039 {
1040 DPRINT1("Heap %p: block %p is not inside heap\n",
1041 heap, block );
1042 DumpStackFrames(NULL,10);
1043 }
1044 ret = FALSE;
1045 }
1046 else
1047 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1048
1049 if (!(flags & HEAP_NO_SERIALIZE))
1050 RtlLeaveHeapLock( &heapPtr->critSection );
1051 return ret;
1052 }
1053
1054 subheap = &heapPtr->subheap;
1055 while (subheap && ret)
1056 {
1057 char *ptr = (char *)subheap + subheap->headerSize;
1058 while (ptr < (char *)subheap + subheap->size)
1059 {
1060 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1061 {
1062 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1063 {
1064 ret = FALSE;
1065 break;
1066 }
1067 ptr += sizeof(ARENA_FREE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1068 }
1069 else
1070 {
1071 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1072 {
1073 ret = FALSE;
1074 break;
1075 }
1076 ptr += sizeof(ARENA_INUSE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1077 }
1078 }
1079 subheap = subheap->next;
1080 }
1081
1082 if (!(flags & HEAP_NO_SERIALIZE))
1083 RtlLeaveHeapLock( &heapPtr->critSection );
1084 return ret;
1085 }
1086
1087
1088 /***********************************************************************
1089 * HeapCreate (KERNEL32.336)
1090 * RETURNS
1091 * Handle of heap: Success
1092 * NULL: Failure
1093 *
1094 * @implemented
1095 */
1096 HANDLE NTAPI
1097 RtlCreateHeap(ULONG flags,
1098 PVOID BaseAddress,
1099 SIZE_T maxSize,
1100 SIZE_T initialSize,
1101 PVOID Lock,
1102 PRTL_HEAP_PARAMETERS Parameters)
1103 {
1104 SUBHEAP *subheap;
1105 HEAP *heapPtr;
1106
1107 /* Allocate the heap block */
1108
1109 if (!maxSize)
1110 {
1111 maxSize = HEAP_DEF_SIZE;
1112 flags |= HEAP_GROWABLE;
1113 }
1114 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize,
1115 maxSize, Parameters )))
1116 {
1117 return 0;
1118 }
1119
1120 if (RtlpGetMode() == UserMode)
1121 {
1122 /* link it into the per-process heap list */
1123 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1124
1125 heapPtr = subheap->heap;
1126 heapPtr->next = (HEAP*)NtCurrentPeb()->ProcessHeaps;
1127 NtCurrentPeb()->ProcessHeaps = (HANDLE)heapPtr;
1128 NtCurrentPeb()->NumberOfHeaps++;
1129
1130 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1131 }
1132
1133 return (HANDLE)subheap;
1134 }
1135
1136 /***********************************************************************
1137 * HeapDestroy (KERNEL32.337)
1138 * RETURNS
1139 * TRUE: Success
1140 * FALSE: Failure
1141 *
1142 * @implemented
1143 *
1144 * RETURNS
1145 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1146 * Failure: The Heap handle, if heap is the process heap.
1147 */
1148 HANDLE NTAPI
1149 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1150 {
1151 HEAP *heapPtr = HEAP_GetPtr( heap );
1152 SUBHEAP *subheap;
1153 ULONG flags;
1154 HEAP **pptr;
1155
1156 DPRINT("%p\n", heap );
1157 if (!heapPtr)
1158 return heap;
1159
1160 if (RtlpGetMode() == UserMode)
1161 {
1162 if (heap == NtCurrentPeb()->ProcessHeap)
1163 return heap; /* cannot delete the main process heap */
1164
1165 /* remove it from the per-process list */
1166 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1167
1168 pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps;
1169 while (*pptr && *pptr != heapPtr) pptr = &(*pptr)->next;
1170 if (*pptr) *pptr = (*pptr)->next;
1171 NtCurrentPeb()->NumberOfHeaps--;
1172
1173 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1174 }
1175
1176 RtlDeleteHeapLock( &heapPtr->critSection );
1177 subheap = &heapPtr->subheap;
1178 // We must save the flags. The first subheap is embedded in the
1179 // heap structure, so releasing the first subheap also releases
1180 // the heap structure.
1181 flags = heapPtr->flags;
1182 while (subheap)
1183 {
1184 SUBHEAP *next = subheap->next;
1185 ULONG dummySize = 0;
1186 ZwFreeVirtualMemory(NtCurrentProcess(),
1187 (PVOID*)&subheap,
1188 &dummySize,
1189 MEM_RELEASE);
1190 subheap = next;
1191 }
1192 return (HANDLE)NULL;
1193 }
1194
1195
1196 /***********************************************************************
1197 * HeapAlloc (KERNEL32.334)
1198 * RETURNS
1199 * Pointer to allocated memory block
1200 * NULL: Failure
1201 * 0x7d030f60--invalid flags in RtlHeapAllocate
1202 * @implemented
1203 */
1204 PVOID NTAPI
1205 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1206 ULONG flags, /* [in] Heap allocation control flags */
1207 ULONG size) /* [in] Number of bytes to allocate */
1208 {
1209 ARENA_FREE *pArena;
1210 ARENA_INUSE *pInUse;
1211 SUBHEAP *subheap;
1212 HEAP *heapPtr = HEAP_GetPtr( heap );
1213
1214 /* Validate the parameters */
1215
1216 if (!heapPtr)
1217 {
1218 if (flags & HEAP_GENERATE_EXCEPTIONS)
1219 RtlRaiseStatus( STATUS_NO_MEMORY );
1220 return NULL;
1221 }
1222 //flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1223 flags |= heapPtr->flags;
1224 if (!(flags & HEAP_NO_SERIALIZE))
1225 RtlEnterHeapLock( &heapPtr->critSection );
1226 size = (size + 7) & ~7;
1227 if (size < HEAP_MIN_BLOCK_SIZE)
1228 size = HEAP_MIN_BLOCK_SIZE;
1229
1230 /* Locate a suitable free block */
1231
1232 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1233 {
1234 DPRINT("(%p,%08lx,%08lx): returning NULL\n",
1235 heap, flags, size );
1236 if (!(flags & HEAP_NO_SERIALIZE))
1237 RtlLeaveHeapLock( &heapPtr->critSection );
1238 if (flags & HEAP_GENERATE_EXCEPTIONS)
1239 RtlRaiseStatus( STATUS_NO_MEMORY );
1240 return NULL;
1241 }
1242
1243 /* Remove the arena from the free list */
1244
1245 pArena->next->prev = pArena->prev;
1246 pArena->prev->next = pArena->next;
1247
1248 /* Build the in-use arena */
1249
1250 pInUse = (ARENA_INUSE *)pArena;
1251 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1252 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1253 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1254 pInUse->magic = ARENA_INUSE_MAGIC;
1255
1256 /* Save user flags */
1257 subheap->UserFlags = flags & HEAP_SETTABLE_USER_FLAGS;
1258
1259 /* Shrink the block */
1260
1261 HEAP_ShrinkBlock( subheap, pInUse, size );
1262
1263 if (flags & HEAP_ZERO_MEMORY)
1264 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1265 else if (TRACE_ON(heap))
1266 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1267
1268 if (!(flags & HEAP_NO_SERIALIZE))
1269 RtlLeaveHeapLock( &heapPtr->critSection );
1270
1271 DPRINT("(%p,%08lx,%08lx): returning %p\n",
1272 heap, flags, size, (PVOID)(pInUse + 1) );
1273 return (PVOID)(pInUse + 1);
1274 }
1275
1276
1277 /***********************************************************************
1278 * HeapFree (KERNEL32.338)
1279 * RETURNS
1280 * TRUE: Success
1281 * FALSE: Failure
1282 *
1283 * @implemented
1284 */
1285 BOOLEAN NTAPI RtlFreeHeap(
1286 HANDLE heap, /* [in] Handle of heap */
1287 ULONG flags, /* [in] Heap freeing flags */
1288 PVOID ptr /* [in] Address of memory to free */
1289 )
1290 {
1291 ARENA_INUSE *pInUse;
1292 SUBHEAP *subheap;
1293 HEAP *heapPtr = HEAP_GetPtr( heap );
1294
1295 /* Validate the parameters */
1296
1297 if (!heapPtr)
1298 return FALSE;
1299 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1300 {
1301 DPRINT("(%p,%08lx,%p): asked to free NULL\n",
1302 heap, flags, ptr );
1303 return TRUE;
1304 }
1305
1306 flags &= HEAP_NO_SERIALIZE;
1307 flags |= heapPtr->flags;
1308 if (!(flags & HEAP_NO_SERIALIZE))
1309 RtlEnterHeapLock( &heapPtr->critSection );
1310 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1311 {
1312 if (!(flags & HEAP_NO_SERIALIZE))
1313 RtlLeaveHeapLock( &heapPtr->critSection );
1314 DPRINT("(%p,%08lx,%p): returning FALSE\n",
1315 heap, flags, ptr );
1316 return FALSE;
1317 }
1318
1319 /* Turn the block into a free block */
1320
1321 pInUse = (ARENA_INUSE *)ptr - 1;
1322 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1323 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1324
1325 if (!(flags & HEAP_NO_SERIALIZE))
1326 RtlLeaveHeapLock( &heapPtr->critSection );
1327
1328 DPRINT("(%p,%08lx,%p): returning TRUE\n",
1329 heap, flags, ptr );
1330 return TRUE;
1331 }
1332
1333
1334 /***********************************************************************
1335 * RtlReAllocateHeap
1336 * PARAMS
1337 * Heap [in] Handle of heap block
1338 * Flags [in] Heap reallocation flags
1339 * Ptr, [in] Address of memory to reallocate
1340 * Size [in] Number of bytes to reallocate
1341 *
1342 * RETURNS
1343 * Pointer to reallocated memory block
1344 * NULL: Failure
1345 * 0x7d030f60--invalid flags in RtlHeapAllocate
1346 * @implemented
1347 */
1348 PVOID NTAPI RtlReAllocateHeap(
1349 HANDLE Heap,
1350 ULONG Flags,
1351 PVOID Ptr,
1352 ULONG Size
1353 )
1354 {
1355 ARENA_INUSE *pArena;
1356 ULONG oldSize;
1357 HEAP *heapPtr;
1358 SUBHEAP *subheap;
1359
1360 if (!Ptr)
1361 return FALSE;
1362 if (!(heapPtr = HEAP_GetPtr( Heap )))
1363 return FALSE;
1364
1365 /* Validate the parameters */
1366
1367 //Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1368 // HEAP_REALLOC_IN_PLACE_ONLY;
1369 Flags |= heapPtr->flags;
1370 Size = (Size + 7) & ~7;
1371 if (Size < HEAP_MIN_BLOCK_SIZE)
1372 Size = HEAP_MIN_BLOCK_SIZE;
1373
1374 if (!(Flags & HEAP_NO_SERIALIZE))
1375 RtlEnterHeapLock( &heapPtr->critSection );
1376 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1377 {
1378 if (!(Flags & HEAP_NO_SERIALIZE))
1379 RtlLeaveHeapLock( &heapPtr->critSection );
1380 DPRINT("(%p,%08lx,%p,%08lx): returning NULL\n",
1381 Heap, Flags, Ptr, Size );
1382 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1383 RtlRaiseStatus( STATUS_NO_MEMORY );
1384 return NULL;
1385 }
1386
1387 /* Check if we need to grow the block */
1388
1389 pArena = (ARENA_INUSE *)Ptr - 1;
1390 pArena->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1391
1392 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1393 oldSize = (pArena->size & ARENA_SIZE_MASK);
1394 if (Size > oldSize)
1395 {
1396 char *pNext = (char *)(pArena + 1) + oldSize;
1397 if ((pNext < (char *)subheap + subheap->size) &&
1398 (*(PULONG)pNext & ARENA_FLAG_FREE) &&
1399 (oldSize + (*(PULONG)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= Size))
1400 {
1401 /* The next block is free and large enough */
1402 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1403 pFree->next->prev = pFree->prev;
1404 pFree->prev->next = pFree->next;
1405 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1406 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1407 + Size + HEAP_MIN_BLOCK_SIZE,
1408 heapPtr->flags))
1409 {
1410 if (!(Flags & HEAP_NO_SERIALIZE))
1411 RtlLeaveHeapLock( &heapPtr->critSection );
1412 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1413 RtlRaiseStatus( STATUS_NO_MEMORY );
1414 return NULL;
1415 }
1416 HEAP_ShrinkBlock( subheap, pArena, Size );
1417 }
1418 else /* Do it the hard way */
1419 {
1420 ARENA_FREE *pNew;
1421 ARENA_INUSE *pInUse;
1422 SUBHEAP *newsubheap;
1423
1424 if ((Flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1425 !(pNew = HEAP_FindFreeBlock( heapPtr, Size, &newsubheap )))
1426 {
1427 if (!(Flags & HEAP_NO_SERIALIZE))
1428 RtlLeaveHeapLock( &heapPtr->critSection );
1429 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1430 RtlRaiseStatus( STATUS_NO_MEMORY );
1431 return NULL;
1432 }
1433
1434 /* Build the in-use arena */
1435
1436 pNew->next->prev = pNew->prev;
1437 pNew->prev->next = pNew->next;
1438 pInUse = (ARENA_INUSE *)pNew;
1439 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1440 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1441 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1442 pInUse->magic = ARENA_INUSE_MAGIC;
1443 HEAP_ShrinkBlock( newsubheap, pInUse, Size );
1444 memcpy( pInUse + 1, pArena + 1, oldSize );
1445
1446 /* Free the previous block */
1447
1448 HEAP_MakeInUseBlockFree( subheap, pArena, Flags );
1449 subheap = newsubheap;
1450 pArena = pInUse;
1451 }
1452 }
1453 else
1454 HEAP_ShrinkBlock( subheap, pArena, Size ); /* Shrink the block */
1455
1456 /* Clear the extra bytes if needed */
1457
1458 if (Size > oldSize)
1459 {
1460 if (Flags & HEAP_ZERO_MEMORY)
1461 memset( (char *)(pArena + 1) + oldSize, 0,
1462 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1463 else if (TRACE_ON(heap))
1464 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1465 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1466 }
1467
1468 /* Return the new arena */
1469
1470 if (!(Flags & HEAP_NO_SERIALIZE))
1471 RtlLeaveHeapLock( &heapPtr->critSection );
1472
1473 DPRINT("(%p,%08lx,%p,%08lx): returning %p\n",
1474 Heap, Flags, Ptr, Size, (PVOID)(pArena + 1) );
1475 return (PVOID)(pArena + 1);
1476 }
1477
1478
1479 /***********************************************************************
1480 * RtlCompactHeap
1481 *
1482 * @unimplemented
1483 */
1484 ULONG NTAPI
1485 RtlCompactHeap(HANDLE Heap,
1486 ULONG Flags)
1487 {
1488 UNIMPLEMENTED;
1489 return 0;
1490 }
1491
1492
1493 /***********************************************************************
1494 * RtlLockHeap
1495 * Attempts to acquire the critical section object for a specified heap.
1496 *
1497 * PARAMS
1498 * Heap [in] Handle of heap to lock for exclusive access
1499 *
1500 * RETURNS
1501 * TRUE: Success
1502 * FALSE: Failure
1503 *
1504 * @implemented
1505 */
1506 BOOLEAN NTAPI
1507 RtlLockHeap(IN HANDLE Heap)
1508 {
1509 HEAP *heapPtr = HEAP_GetPtr( Heap );
1510 if (!heapPtr)
1511 return FALSE;
1512 RtlEnterHeapLock( &heapPtr->critSection );
1513 return TRUE;
1514 }
1515
1516
1517 /***********************************************************************
1518 * RtlUnlockHeap
1519 * Releases ownership of the critical section object.
1520 *
1521 * PARAMS
1522 * Heap [in] Handle to the heap to unlock
1523 *
1524 * RETURNS
1525 * TRUE: Success
1526 * FALSE: Failure
1527 *
1528 * @implemented
1529 */
1530 BOOLEAN NTAPI
1531 RtlUnlockHeap(HANDLE Heap)
1532 {
1533 HEAP *heapPtr = HEAP_GetPtr( Heap );
1534 if (!heapPtr)
1535 return FALSE;
1536 RtlLeaveHeapLock( &heapPtr->critSection );
1537 return TRUE;
1538 }
1539
1540
1541 /***********************************************************************
1542 * RtlSizeHeap
1543 * PARAMS
1544 * Heap [in] Handle of heap
1545 * Flags [in] Heap size control flags
1546 * Ptr [in] Address of memory to return size for
1547 *
1548 * RETURNS
1549 * Size in bytes of allocated memory
1550 * 0xffffffff: Failure
1551 *
1552 * @implemented
1553 */
1554 ULONG NTAPI
1555 RtlSizeHeap(
1556 HANDLE Heap,
1557 ULONG Flags,
1558 PVOID Ptr
1559 )
1560 {
1561 ULONG ret;
1562 HEAP *heapPtr = HEAP_GetPtr( Heap );
1563
1564 if (!heapPtr)
1565 return 0;
1566 Flags &= HEAP_NO_SERIALIZE;
1567 Flags |= heapPtr->flags;
1568 if (!(Flags & HEAP_NO_SERIALIZE))
1569 RtlEnterHeapLock( &heapPtr->critSection );
1570 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1571 {
1572 ret = 0xffffffff;
1573 }
1574 else
1575 {
1576 ARENA_INUSE *pArena = (ARENA_INUSE *)Ptr - 1;
1577 ret = pArena->size & ARENA_SIZE_MASK;
1578 }
1579 if (!(Flags & HEAP_NO_SERIALIZE))
1580 RtlLeaveHeapLock( &heapPtr->critSection );
1581
1582 DPRINT("(%p,%08lx,%p): returning %08lx\n",
1583 Heap, Flags, Ptr, ret );
1584 return ret;
1585 }
1586
1587
1588 /***********************************************************************
1589 * RtlValidateHeap
1590 * Validates a specified heap.
1591 *
1592 * PARAMS
1593 * Heap [in] Handle to the heap
1594 * Flags [in] Bit flags that control access during operation
1595 * Block [in] Optional pointer to memory block to validate
1596 *
1597 * NOTES
1598 * Flags is ignored.
1599 *
1600 * RETURNS
1601 * TRUE: Success
1602 * FALSE: Failure
1603 *
1604 * @implemented
1605 */
1606 BOOLEAN NTAPI RtlValidateHeap(
1607 HANDLE Heap,
1608 ULONG Flags,
1609 PVOID Block
1610 )
1611 {
1612 HEAP *heapPtr = HEAP_GetPtr( Heap );
1613 if (!heapPtr)
1614 return FALSE;
1615 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1616 }
1617
1618
1619 /***********************************************************************
1620 * HeapWalk (KERNEL32.344)
1621 * Enumerates the memory blocks in a specified heap.
1622 * See HEAP_Dump() for info on heap structure.
1623 *
1624 * TODO
1625 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1626 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1627 *
1628 * RETURNS
1629 * TRUE: Success
1630 * FALSE: Failure
1631 */
1632 #if 0
1633 BOOLEAN NTAPI HeapWalk(
1634 HANDLE heap, /* [in] Handle to heap to enumerate */
1635 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1636 )
1637 {
1638 HEAP *heapPtr = HEAP_GetPtr(heap);
1639 SUBHEAP *sub, *currentheap = NULL;
1640 BOOLEAN ret = FALSE;
1641 char *ptr;
1642 int region_index = 0;
1643
1644 if (!heapPtr || !entry)
1645 {
1646 return FALSE;
1647 }
1648
1649 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1650 RtlEnterHeapLock( &heapPtr->critSection );
1651
1652 /* set ptr to the next arena to be examined */
1653
1654 if (!entry->lpData) /* first call (init) ? */
1655 {
1656 DPRINT("begin walking of heap 0x%08x.\n", heap);
1657 /*HEAP_Dump(heapPtr);*/
1658 currentheap = &heapPtr->subheap;
1659 ptr = (char*)currentheap + currentheap->headerSize;
1660 }
1661 else
1662 {
1663 ptr = entry->lpData;
1664 sub = &heapPtr->subheap;
1665 while (sub)
1666 {
1667 if (((char *)ptr >= (char *)sub) &&
1668 ((char *)ptr < (char *)sub + sub->size))
1669 {
1670 currentheap = sub;
1671 break;
1672 }
1673 sub = sub->next;
1674 region_index++;
1675 }
1676 if (currentheap == NULL)
1677 {
1678 DPRINT("no matching subheap found, shouldn't happen !\n");
1679 goto HW_end;
1680 }
1681
1682 ptr += entry->cbData; /* point to next arena */
1683 if (ptr > (char *)currentheap + currentheap->size - 1)
1684 { /* proceed with next subheap */
1685 if (!(currentheap = currentheap->next))
1686 { /* successfully finished */
1687 DPRINT("end reached.\n");
1688 goto HW_end;
1689 }
1690 ptr = (char*)currentheap + currentheap->headerSize;
1691 }
1692 }
1693
1694 entry->wFlags = 0;
1695 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1696 {
1697 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1698
1699 /*DPRINT("free, magic: %04x\n", pArena->magic);*/
1700
1701 entry->lpData = pArena + 1;
1702 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1703 entry->cbOverhead = sizeof(ARENA_FREE);
1704 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1705 }
1706 else
1707 {
1708 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1709
1710 /*DPRINT("busy, magic: %04x\n", pArena->magic);*/
1711
1712 entry->lpData = pArena + 1;
1713 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1714 entry->cbOverhead = sizeof(ARENA_INUSE);
1715 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1716 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1717 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1718 }
1719
1720 entry->iRegionIndex = region_index;
1721
1722 /* first element of heap ? */
1723 if (ptr == (char *)(currentheap + currentheap->headerSize))
1724 {
1725 entry->wFlags |= PROCESS_HEAP_REGION;
1726 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1727 entry->Foo.Region.dwUnCommittedSize =
1728 currentheap->size - currentheap->commitSize;
1729 entry->Foo.Region.lpFirstBlock = /* first valid block */
1730 currentheap + currentheap->headerSize;
1731 entry->Foo.Region.lpLastBlock = /* first invalid block */
1732 currentheap + currentheap->size;
1733 }
1734 ret = TRUE;
1735
1736 HW_end:
1737 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1738 RtlLeaveHeapLock( &heapPtr->critSection );
1739
1740 return ret;
1741 }
1742 #endif
1743
1744
1745 VOID
1746 RtlInitializeHeapManager(VOID)
1747 {
1748 PPEB Peb;
1749
1750 Peb = NtCurrentPeb();
1751
1752 Peb->NumberOfHeaps = 0;
1753 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1754 Peb->ProcessHeaps = NULL;
1755
1756 RtlInitializeHeapLock(&RtlpProcessHeapsListLock);
1757 }
1758
1759
1760 /*
1761 * @implemented
1762 */
1763 NTSTATUS NTAPI
1764 RtlEnumProcessHeaps(PHEAP_ENUMERATION_ROUTINE HeapEnumerationRoutine,
1765 PVOID lParam)
1766 {
1767 NTSTATUS Status = STATUS_SUCCESS;
1768 HEAP** pptr;
1769
1770 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1771
1772 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1773 {
1774 Status = HeapEnumerationRoutine(*pptr,lParam);
1775 if (!NT_SUCCESS(Status))
1776 break;
1777 }
1778
1779 RtlLeaveHeapLock(&RtlpProcessHeapsListLock);
1780
1781 return Status;
1782 }
1783
1784
1785 /*
1786 * @implemented
1787 */
1788 ULONG NTAPI
1789 RtlGetProcessHeaps(ULONG HeapCount,
1790 HANDLE *HeapArray)
1791 {
1792 ULONG Result = 0;
1793 HEAP ** pptr;
1794
1795 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1796
1797 Result = NtCurrentPeb()->NumberOfHeaps;
1798
1799 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1800 {
1801 int i = 0;
1802 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1803 {
1804 HeapArray[i++] = *pptr;
1805 }
1806 }
1807
1808 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1809
1810 return Result;
1811 }
1812
1813
1814 /*
1815 * @implemented
1816 */
1817 BOOLEAN NTAPI
1818 RtlValidateProcessHeaps(VOID)
1819 {
1820 BOOLEAN Result = TRUE;
1821 HEAP ** pptr;
1822
1823 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1824
1825 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1826 {
1827 if (!RtlValidateHeap(*pptr, 0, NULL))
1828 {
1829 Result = FALSE;
1830 break;
1831 }
1832 }
1833
1834 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1835
1836 return Result;
1837 }
1838
1839
1840 /*
1841 * @unimplemented
1842 */
1843 BOOLEAN NTAPI
1844 RtlZeroHeap(
1845 IN PVOID HeapHandle,
1846 IN ULONG Flags
1847 )
1848 {
1849 UNIMPLEMENTED;
1850 return FALSE;
1851 }
1852
1853 /*
1854 * @unimplemented
1855 */
1856 BOOLEAN
1857 NTAPI
1858 RtlSetUserValueHeap(IN PVOID HeapHandle,
1859 IN ULONG Flags,
1860 IN PVOID BaseAddress,
1861 IN PVOID UserValue)
1862 {
1863 HEAP *heapPtr = HEAP_GetPtr(HeapHandle);
1864 ARENA_INUSE *pInUse;
1865 SUBHEAP *subheap;
1866
1867 /* Get the subheap */
1868 pInUse = (ARENA_INUSE *)BaseAddress - 1;
1869 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1870
1871 /* Hack */
1872 subheap->UserValue = UserValue;
1873 return TRUE;
1874 }
1875
1876 /*
1877 * @unimplemented
1878 */
1879 BOOLEAN
1880 NTAPI
1881 RtlGetUserInfoHeap(IN PVOID HeapHandle,
1882 IN ULONG Flags,
1883 IN PVOID BaseAddress,
1884 OUT PVOID *UserValue,
1885 OUT PULONG UserFlags)
1886 {
1887 HEAP *heapPtr = HEAP_GetPtr(HeapHandle);
1888 ARENA_INUSE *pInUse;
1889 SUBHEAP *subheap;
1890
1891 /* Get the subheap */
1892 pInUse = (ARENA_INUSE *)BaseAddress - 1;
1893 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1894
1895 /* Hack */
1896 DPRINT1("V/F: %p %lx\n", subheap->UserValue, subheap->UserFlags);
1897 if (UserValue) *UserValue = subheap->UserValue;
1898 if (UserFlags) *UserFlags = subheap->UserFlags;
1899 return TRUE;
1900 }
1901
1902 /*
1903 * @unimplemented
1904 */
1905 NTSTATUS
1906 NTAPI
1907 RtlUsageHeap(IN HANDLE Heap,
1908 IN ULONG Flags,
1909 OUT PRTL_HEAP_USAGE Usage)
1910 {
1911 /* TODO */
1912 UNIMPLEMENTED;
1913 return STATUS_NOT_IMPLEMENTED;
1914 }
1915
1916 PWSTR
1917 NTAPI
1918 RtlQueryTagHeap(IN PVOID HeapHandle,
1919 IN ULONG Flags,
1920 IN USHORT TagIndex,
1921 IN BOOLEAN ResetCounters,
1922 OUT PRTL_HEAP_TAG_INFO HeapTagInfo)
1923 {
1924 /* TODO */
1925 UNIMPLEMENTED;
1926 return NULL;
1927 }
1928
1929 ULONG
1930 NTAPI
1931 RtlExtendHeap(IN HANDLE Heap,
1932 IN ULONG Flags,
1933 IN PVOID P,
1934 IN ULONG Size)
1935 {
1936 /* TODO */
1937 UNIMPLEMENTED;
1938 return 0;
1939 }
1940
1941 ULONG
1942 NTAPI
1943 RtlCreateTagHeap(IN HANDLE HeapHandle,
1944 IN ULONG Flags,
1945 IN PWSTR TagName,
1946 IN PWSTR TagSubName)
1947 {
1948 /* TODO */
1949 UNIMPLEMENTED;
1950 return 0;
1951 }
1952
1953 /* EOF */