1 /* COPYRIGHT: See COPYING in the top level directory
2 * PROJECT: ReactOS system libraries
3 * FILE: lib/rtl/heap.c
4 * PURPOSE: Heap management functions
5 * PROGRAMMERS: Copyright 1996 Alexandre Julliard
6 * Copyright 1998 Ulrich Weigand
7 */
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 /* INCLUDES *****************************************************************/
16
17 #include <rtl.h>
18
19 #define NDEBUG
20 #include <debug.h>
21
22 /* FUNCTIONS *****************************************************************/
23
24 #define WARN_ON(x) (1)
25
26 #ifdef NDEBUG
27 #define TRACE_ON(x) (0)
28 #else
29 #define TRACE_ON(x) (1)
30 #endif
31
32 static RTL_CRITICAL_SECTION RtlpProcessHeapsListLock;
33
34
35 typedef struct tagARENA_INUSE
36 {
37 ULONG size; /* Block size; must be the first field */
38 USHORT threadId; /* Allocating thread id */
39 USHORT magic; /* Magic number */
40 }
41 ARENA_INUSE;
42
43 typedef struct tagARENA_FREE
44 {
45 ULONG size; /* Block size; must be the first field */
46 USHORT threadId; /* Freeing thread id */
47 USHORT magic; /* Magic number */
48 struct tagARENA_FREE *next; /* Next free arena */
49 struct tagARENA_FREE *prev; /* Prev free arena */
50 }
51 ARENA_FREE;
52
53 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
54 #define ARENA_FLAG_PREV_FREE 0x00000002
55 #define ARENA_SIZE_MASK 0xfffffffc
56 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
57 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
58
59 #define ARENA_INUSE_FILLER 0x55
60 #define ARENA_FREE_FILLER 0xaa
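/* The two low-order bits of the 'size' field carry the ARENA_FLAG_* bits
 * above; ARENA_SIZE_MASK recovers the real block size. When a block's
 * predecessor is free (ARENA_FLAG_PREV_FREE), the ULONG stored immediately
 * before the arena header holds a back-pointer to that free arena (see
 * HEAP_CreateFreeBlock and HEAP_MakeInUseBlockFree). */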
61
62 #define QUIET 1 /* Suppress messages */
63 #define NOISY 0 /* Report all errors */
64
65 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
66
67 /* Max size of the blocks on the free lists */
68 static const ULONG HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
69 {
70 0x20, 0x80, 0x200, 0xffffffff
71 };
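/* A free block is binned into the first list whose size bound is >= the
 * block size; the final 0xffffffff entry catches everything larger than 0x200. */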
72
73 typedef struct
74 {
75 ULONG size;
76 ARENA_FREE arena;
77 }
78 FREE_LIST_ENTRY;
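/* Each free list is headed by a dummy ARENA_FREE embedded in the HEAP
 * structure, so the doubly-linked list is circular and never empty;
 * HEAP_FindFreeBlock stops when it wraps around to freeList[0].arena. */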
79
80 struct tagHEAP;
81
82 typedef struct tagSUBHEAP
83 {
84 ULONG size; /* Size of the whole sub-heap */
85 ULONG commitSize; /* Committed size of the sub-heap */
86 ULONG headerSize; /* Size of the heap header */
87 struct tagSUBHEAP *next; /* Next sub-heap */
88 struct tagHEAP *heap; /* Main heap structure */
89 ULONG magic; /* Magic number */
90 ULONG UserFlags;
91 PVOID UserValue;
92 }
93 SUBHEAP, *PSUBHEAP;
94
95 #define SUBHEAP_MAGIC ((ULONG)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
96
97 typedef struct tagHEAP
98 {
99 SUBHEAP subheap; /* First sub-heap */
100 struct tagHEAP *next; /* Next heap for this process */
101 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
102 RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
103 ULONG flags; /* Heap flags */
104 ULONG magic; /* Magic number */
105 PRTL_HEAP_COMMIT_ROUTINE commitRoutine;
106 }
107 HEAP, *PHEAP;
108
109 #define HEAP_MAGIC ((ULONG)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
110
111 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
112 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
113 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
114
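/* A heap handle is the base address of its first sub-heap; for the main
 * sub-heap this address is also the HEAP structure itself (see the 'subheap'
 * member above). Additional sub-heaps are separate reserved regions chained
 * through SUBHEAP.next. */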
115
116 static BOOLEAN HEAP_IsRealArena( HANDLE heap, ULONG flags, PVOID block, BOOLEAN quiet );
117
118
119 /***********************************************************************
120 * HEAP_Dump
121 */
122 VOID
123 HEAP_Dump(PHEAP heap)
124 {
125 int i;
126 SUBHEAP *subheap;
127 char *ptr;
128
129 DPRINT( "Heap: %p\n", heap );
130 DPRINT( "Next: %p Sub-heaps: %p",
131 heap->next, &heap->subheap );
132 subheap = &heap->subheap;
133 while (subheap->next)
134 {
135 DPRINT( " -> %p", subheap->next );
136 subheap = subheap->next;
137 }
138
139 DPRINT( "\nFree lists:\n Block Stat Size Id\n" );
140 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
141 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
142 &heap->freeList[i].arena,
143 heap->freeList[i].arena.size,
144 heap->freeList[i].arena.threadId,
145 heap->freeList[i].arena.prev,
146 heap->freeList[i].arena.next );
147
148 subheap = &heap->subheap;
149 while (subheap)
150 {
151 ULONG freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
152 DPRINT( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
153 subheap, subheap->size, subheap->commitSize );
154
155 DPRINT( "\n Block Stat Size Id\n" );
156 ptr = (char*)subheap + subheap->headerSize;
157 while (ptr < (char *)subheap + subheap->size)
158 {
159 if (*(PULONG)ptr & ARENA_FLAG_FREE)
160 {
161 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
162 DPRINT( "%p free %08lx %04x prev=%p next=%p\n",
163 pArena, pArena->size & ARENA_SIZE_MASK,
164 pArena->threadId, pArena->prev,
165 pArena->next);
166 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
167 arenaSize += sizeof(ARENA_FREE);
168 freeSize += pArena->size & ARENA_SIZE_MASK;
169 }
170 else if (*(PULONG)ptr & ARENA_FLAG_PREV_FREE)
171 {
172 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
173 DPRINT( "%p Used %08lx %04x back=%08lx\n",
174 pArena, pArena->size & ARENA_SIZE_MASK,
175 pArena->threadId, *((PULONG)pArena - 1));
176 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
177 arenaSize += sizeof(ARENA_INUSE);
178 usedSize += pArena->size & ARENA_SIZE_MASK;
179 }
180 else
181 {
182 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
183 DPRINT( "%p used %08lx %04x\n",
184 pArena, pArena->size & ARENA_SIZE_MASK,
185 pArena->threadId);
186 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
187 arenaSize += sizeof(ARENA_INUSE);
188 usedSize += pArena->size & ARENA_SIZE_MASK;
189 }
190 }
191 DPRINT( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
192 subheap->size, subheap->commitSize, freeSize, usedSize,
193 arenaSize, (arenaSize * 100) / subheap->size );
194 subheap = subheap->next;
195 }
196 }
197
198
199 /***********************************************************************
200 * HEAP_GetPtr
201 * RETURNS
202 * Pointer to the heap
203 * NULL: Failure
204 */
205 static PHEAP
206 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
207 {
208 HEAP *heapPtr = (HEAP *)heap;
209 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
210 {
211 DPRINT("Invalid heap %p!\n", heap );
212 return NULL;
213 }
214 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
215 {
216 HEAP_Dump( heapPtr );
217 ASSERT( FALSE );
218 return NULL;
219 }
220 return heapPtr;
221 }
222
223
224 /***********************************************************************
225 * HEAP_InsertFreeBlock
226 *
227 * Insert a free block into the free list.
228 */
229 static VOID
230 HEAP_InsertFreeBlock(PHEAP heap,
231 ARENA_FREE *pArena,
232 BOOLEAN last)
233 {
234 FREE_LIST_ENTRY *pEntry = heap->freeList;
235 while (pEntry->size < pArena->size)
236 pEntry++;
237 if (last)
238 {
239 /* insert at end of free list, i.e. before next free list entry */
240 pEntry++;
241 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
242 {
243 pEntry = heap->freeList;
244 }
245 pArena->prev = pEntry->arena.prev;
246 pArena->prev->next = pArena;
247 pArena->next = &pEntry->arena;
248 pEntry->arena.prev = pArena;
249 }
250 else
251 {
252 /* insert at head of free list */
253 pArena->next = pEntry->arena.next;
254 pArena->next->prev = pArena;
255 pArena->prev = &pEntry->arena;
256 pEntry->arena.next = pArena;
257 }
258 pArena->size |= ARENA_FLAG_FREE;
259 }
260
261
262 /***********************************************************************
263 * HEAP_FindSubHeap
264 * Find the sub-heap containing a given address.
265 *
266 * RETURNS
267 * Pointer: Success
268 * NULL: Failure
269 */
270 static PSUBHEAP
271 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
272 PVOID ptr) /* [in] Address */
273 {
274 PSUBHEAP sub = &heap->subheap;
275 while (sub)
276 {
277 if (((char *)ptr >= (char *)sub) &&
278 ((char *)ptr < (char *)sub + sub->size))
279 return sub;
280 sub = sub->next;
281 }
282 return NULL;
283 }
284
285
286 /***********************************************************************
287 * HEAP_Commit
288 *
289 * Make sure the heap storage is committed up to (not including) ptr.
290 */
291 static __inline BOOLEAN
292 HEAP_Commit(SUBHEAP *subheap,
293 PVOID ptr,
294 ULONG flags)
295 {
296 SIZE_T size = (SIZE_T)((char *)ptr - (char *)subheap);
297 NTSTATUS Status;
298 PVOID address;
299 SIZE_T commitsize;
300
301 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
302 if (size > subheap->size)
303 size = subheap->size;
304 if (size <= subheap->commitSize)
305 return TRUE;
306
307 address = (PVOID)((char *)subheap + subheap->commitSize);
308 commitsize = size - subheap->commitSize;
309
310 if (subheap->heap->commitRoutine != NULL)
311 {
312 Status = subheap->heap->commitRoutine(subheap->heap, &address, &commitsize);
313 }
314 else
315 {
316 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
317 &address,
318 0,
319 &commitsize,
320 MEM_COMMIT,
321 PAGE_EXECUTE_READWRITE);
322 }
323 if (!NT_SUCCESS(Status))
324 {
325 DPRINT( "Could not commit %08lx bytes at %p for heap %p\n",
326 size - subheap->commitSize,
327 ((char *)subheap + subheap->commitSize),
328 subheap->heap );
329 return FALSE;
330 }
331
332 subheap->commitSize += commitsize;
333 return TRUE;
334 }
335
336
337 #if 0
338 /***********************************************************************
339 * HEAP_Decommit
340 *
341 * If possible, decommit the heap storage from (including) 'ptr'.
342 */
343 static inline BOOLEAN HEAP_Decommit( SUBHEAP *subheap, PVOID ptr, ULONG flags )
344 {
345 ULONG size = (ULONG)((char *)ptr - (char *)subheap);
346 PVOID address;
347 ULONG decommitsize;
348 NTSTATUS Status;
349 /* round to next block and add one full block */
350 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
351 if (size >= subheap->commitSize)
352 return TRUE;
353
354 address = (PVOID)((char *)subheap + size);
355 decommitsize = subheap->commitSize - size;
356
357 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
358 &address,
359 &decommitsize,
360 MEM_DECOMMIT);
361 if (!NT_SUCCESS(Status))
362 {
363 DPRINT( "Could not decommit %08lx bytes at %p for heap %p\n",
364 subheap->commitSize - size,
365 ((char *)subheap + size),
366 subheap->heap );
367 return FALSE;
368 }
369
370 subheap->commitSize -= decommitsize;
371 return TRUE;
372 }
373 #endif
374
375 /***********************************************************************
376 * HEAP_CreateFreeBlock
377 *
378 * Create a free block at a specified address. 'size' is the size of the
379 * whole block, including the new arena.
380 */
381 static VOID HEAP_CreateFreeBlock( SUBHEAP *subheap, PVOID ptr, ULONG size )
382 {
383 ARENA_FREE *pFree;
384 BOOLEAN last;
385
386 /* Create a free arena */
387
388 pFree = (ARENA_FREE *)ptr;
389 pFree->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
390 pFree->magic = ARENA_FREE_MAGIC;
391
392 /* If debugging, erase the freed block content */
393
394 if (TRACE_ON(heap))
395 {
396 char *pEnd = (char *)ptr + size;
397 if (pEnd > (char *)subheap + subheap->commitSize)
398 pEnd = (char *)subheap + subheap->commitSize;
399 if (pEnd > (char *)(pFree + 1))
400 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
401 }
402
403 /* Check if next block is free also */
404
405 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
406 (*(PULONG)((char *)ptr + size) & ARENA_FLAG_FREE))
407 {
408 /* Remove the next arena from the free list */
409 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
410 pNext->next->prev = pNext->prev;
411 pNext->prev->next = pNext->next;
412 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
413 if (TRACE_ON(heap))
414 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
415 }
416
417 /* Set the next block PREV_FREE flag and pointer */
418
419 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
420 if (!last)
421 {
422 PULONG pNext = (PULONG)((char *)ptr + size);
423 *pNext |= ARENA_FLAG_PREV_FREE;
424 *(ARENA_FREE **)(pNext - 1) = pFree;
425 }
426
427 /* Last, insert the new block into the free list */
428
429 pFree->size = size - sizeof(*pFree);
430 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
431 }
432
433
434 /***********************************************************************
435 * HEAP_MakeInUseBlockFree
436 *
437 * Turn an in-use block into a free block. Can also decommit the end of
438 * the heap, and possibly even free the sub-heap altogether.
439 */
440 static VOID HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
441 ULONG flags)
442 {
443 ARENA_FREE *pFree;
444 ULONG size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
445 ULONG dummySize = 0;
446
447 /* Check if we can merge with previous block */
448
449 if (pArena->size & ARENA_FLAG_PREV_FREE)
450 {
451 pFree = *((ARENA_FREE **)pArena - 1);
452 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
453 /* Remove it from the free list */
454 pFree->next->prev = pFree->prev;
455 pFree->prev->next = pFree->next;
456 }
457 else
458 pFree = (ARENA_FREE *)pArena;
459
460 /* Create a free block */
461
462 HEAP_CreateFreeBlock( subheap, pFree, size );
463 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
464 if ((char *)pFree + size < (char *)subheap + subheap->size)
465 return; /* Not the last block, so nothing more to do */
466
467 /* Free the whole sub-heap if it's empty and not the original one */
468
469 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
470 (subheap != &subheap->heap->subheap))
471 {
472 SUBHEAP *pPrev = &subheap->heap->subheap;
473 /* Remove the free block from the list */
474 pFree->next->prev = pFree->prev;
475 pFree->prev->next = pFree->next;
476 /* Remove the subheap from the list */
477 while (pPrev && (pPrev->next != subheap))
478 pPrev = pPrev->next;
479 if (pPrev)
480 pPrev->next = subheap->next;
481 /* Free the memory */
482 subheap->magic = 0;
483 ZwFreeVirtualMemory(NtCurrentProcess(),
484 (PVOID*)&subheap,
485 &dummySize,
486 MEM_RELEASE);
487 return;
488 }
489
490 /* Decommit the end of the heap (not done here; see HEAP_Decommit above, currently compiled out) */
491 }
492
493
494 /***********************************************************************
495 * HEAP_ShrinkBlock
496 *
497 * Shrink an in-use block.
498 */
499 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, ULONG size)
500 {
501 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
502 {
503 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
504 (pArena->size & ARENA_SIZE_MASK) - size );
505 /* assign size plus previous arena flags */
506 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
507 }
508 else
509 {
510 /* Turn off PREV_FREE flag in next block */
511 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
512 if (pNext < (char *)subheap + subheap->size)
513 *(PULONG)pNext &= ~ARENA_FLAG_PREV_FREE;
514 }
515 }
516
517 /***********************************************************************
518 * HEAP_InitSubHeap
519 */
520 static BOOLEAN HEAP_InitSubHeap( HEAP *heap, PVOID address, ULONG flags,
521 SIZE_T commitSize, ULONG totalSize,
522 PRTL_HEAP_PARAMETERS Parameters )
523 {
524 SUBHEAP *subheap = (SUBHEAP *)address;
525 FREE_LIST_ENTRY *pEntry;
526 int i;
527 NTSTATUS Status;
528
529 /* Fill the sub-heap structure */
530
531 subheap = (PSUBHEAP)address;
532 subheap->heap = heap;
533 subheap->size = totalSize;
534 subheap->commitSize = commitSize;
535 subheap->magic = SUBHEAP_MAGIC;
536
537 if ( subheap != (SUBHEAP *)heap )
538 {
539 /* If this is a secondary subheap, insert it into list */
540
541 subheap->headerSize = sizeof(SUBHEAP);
542 subheap->next = heap->subheap.next;
543 heap->subheap.next = subheap;
544 }
545 else
546 {
547 /* If this is a primary subheap, initialize main heap */
548
549 subheap->headerSize = sizeof(HEAP);
550 subheap->next = NULL;
551 heap->next = NULL;
552 heap->flags = flags;
553 heap->magic = HEAP_MAGIC;
554 if (Parameters)
555 heap->commitRoutine = Parameters->CommitRoutine;
556 else
557 heap->commitRoutine = NULL;
558
559 /* Build the free lists */
560
561 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
562 {
563 pEntry->size = HEAP_freeListSizes[i];
564 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
565 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
566 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
567 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
568 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
569 pEntry->arena.threadId = 0;
570 pEntry->arena.magic = ARENA_FREE_MAGIC;
571 }
572
573 /* Initialize critical section */
574
575 if (RtlpGetMode() == UserMode)
576 {
577 RtlInitializeHeapLock( &heap->critSection );
578 }
579 }
580
581 /* Commit memory */
582 if (heap->commitRoutine)
583 {
584 if (subheap != (SUBHEAP *)heap)
585 {
586 Status = heap->commitRoutine(heap, &address, &commitSize);
587 }
588 else
589 {
590 /* the caller is responsible for committing the first page! */
591 Status = STATUS_SUCCESS;
592 }
593 }
594 else
595 {
596 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
597 &address,
598 0,
599 &commitSize,
600 MEM_COMMIT,
601 PAGE_EXECUTE_READWRITE);
602 }
603 if (!NT_SUCCESS(Status))
604 {
605 DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
606 commitSize, address);
607 return FALSE;
608 }
609
610 /* Create the first free block */
611
612 HEAP_CreateFreeBlock( subheap, (PUCHAR)subheap + subheap->headerSize,
613 subheap->size - subheap->headerSize );
614
615 return TRUE;
616 }
617
618 /***********************************************************************
619 * HEAP_CreateSubHeap
620 *
621 * Create a sub-heap of the given size.
622 * If heap == NULL, creates a main heap.
623 */
624 static PSUBHEAP
625 HEAP_CreateSubHeap(PVOID BaseAddress,
626 HEAP *heap, ULONG flags,
627 SIZE_T commitSize, ULONG totalSize,
628 PRTL_HEAP_PARAMETERS Parameters )
629 {
630 PVOID address;
631 NTSTATUS Status;
632
633 /* Round-up sizes on a 64K boundary */
634
635 totalSize = (totalSize + 0xffff) & 0xffff0000;
636 commitSize = (commitSize + 0xffff) & 0xffff0000;
637 if (!commitSize)
638 commitSize = 0x10000;
639 if (totalSize < commitSize)
640 totalSize = commitSize;
641
642 /* Allocate the memory block */
643 address = BaseAddress;
644 if (!address)
645 {
646 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
647 &address,
648 0,
649 (PULONG)&totalSize,
650 MEM_RESERVE | MEM_COMMIT,
651 PAGE_EXECUTE_READWRITE);
652 if (!NT_SUCCESS(Status))
653 {
654 DPRINT("Could not VirtualAlloc %08lx bytes\n",
655 totalSize );
656 return NULL;
657 }
658 }
659
660 /* Initialize subheap */
661
662 if (!HEAP_InitSubHeap( heap ? heap : (HEAP *)address,
663 address, flags, commitSize, totalSize,
664 Parameters ))
665 {
666 if (!BaseAddress)
667 {
668 ULONG dummySize = 0;
669 ZwFreeVirtualMemory(NtCurrentProcess(),
670 &address,
671 &dummySize,
672 MEM_RELEASE);
673 }
674 return NULL;
675 }
676
677 return (SUBHEAP *)address;
678 }
679
680
681 /***********************************************************************
682 * HEAP_FindFreeBlock
683 *
684 * Find a free block at least as large as the requested size, and make sure
685 * the requested size is committed.
686 */
687 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, SIZE_T size,
688 SUBHEAP **ppSubHeap )
689 {
690 SUBHEAP *subheap;
691 ARENA_FREE *pArena;
692 FREE_LIST_ENTRY *pEntry = heap->freeList;
693
694 /* Find a suitable free list, and in it find a block large enough */
695
696 while (pEntry->size < size)
697 pEntry++;
698 pArena = pEntry->arena.next;
699 while (pArena != &heap->freeList[0].arena)
700 {
701 ULONG arena_size = (pArena->size & ARENA_SIZE_MASK) +
702 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
703 if (arena_size >= size)
704 {
705 subheap = HEAP_FindSubHeap( heap, pArena );
706 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
707 + size + HEAP_MIN_BLOCK_SIZE,
708 heap->flags))
709 return NULL;
710 *ppSubHeap = subheap;
711 return pArena;
712 }
713
714 pArena = pArena->next;
715 }
716
717 /* If no block was found, attempt to grow the heap */
718
719 if (!(heap->flags & HEAP_GROWABLE))
720 {
721 DPRINT("Not enough space in heap %p for %08lx bytes\n",
722 heap, size );
723 return NULL;
724 }
725 /* Make sure the committed size is large enough to hold one more
726 * trailing free arena: the sub-heap header, a first free arena
727 * (which will soon become in-use), and HEAP_MIN_BLOCK_SIZE for the
728 * second free arena that may receive all remaining free space in
729 * HEAP_ShrinkBlock(). */
730 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
731 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
732 max( HEAP_DEF_SIZE, size ), NULL )))
733 return NULL;
734
735 DPRINT("created new sub-heap %p of %08lx bytes for heap %p\n",
736 subheap, size, heap );
737
738 *ppSubHeap = subheap;
739 return (ARENA_FREE *)(subheap + 1);
740 }
741
742
743 /***********************************************************************
744 * HEAP_IsValidArenaPtr
745 *
746 * Check that the pointer is inside the range possible for arenas.
747 */
748 static BOOLEAN HEAP_IsValidArenaPtr( HEAP *heap, PVOID ptr )
749 {
750 int i;
751 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
752 if (!subheap)
753 return FALSE;
754 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
755 return TRUE;
756 if (subheap != &heap->subheap)
757 return FALSE;
758 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
759 if (ptr == (PVOID)&heap->freeList[i].arena)
760 return TRUE;
761 return FALSE;
762 }
763
764
765 /***********************************************************************
766 * HEAP_ValidateFreeArena
767 */
768 static BOOLEAN HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
769 {
770 char *heapEnd = (char *)subheap + subheap->size;
771
772 /* Check magic number */
773 if (pArena->magic != ARENA_FREE_MAGIC)
774 {
775 DPRINT("Heap %p: invalid free arena magic for %p\n",
776 subheap->heap, pArena);
777 return FALSE;
778 }
779 /* Check size flags */
780 if (!(pArena->size & ARENA_FLAG_FREE) ||
781 (pArena->size & ARENA_FLAG_PREV_FREE))
782 {
783 DPRINT("Heap %p: bad flags %lx for free arena %p\n",
784 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
return FALSE;
785 }
786 /* Check arena size */
787 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
788 {
789 DPRINT("Heap %p: bad size %08lx for free arena %p\n",
790 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena );
791 return FALSE;
792 }
793 /* Check that next pointer is valid */
794 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
795 {
796 DPRINT("Heap %p: bad next ptr %p for arena %p\n",
797 subheap->heap, pArena->next, pArena);
798 return FALSE;
799 }
800 /* Check that next arena is free */
801 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
802 (pArena->next->magic != ARENA_FREE_MAGIC))
803 {
804 DPRINT("Heap %p: next arena %p invalid for %p\n",
805 subheap->heap, pArena->next, pArena);
806 return FALSE;
807 }
808 /* Check that prev pointer is valid */
809 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
810 {
811 DPRINT("Heap %p: bad prev ptr %p for arena %p\n",
812 subheap->heap, pArena->prev, pArena);
813 return FALSE;
814 }
815 /* Check that prev arena is free */
816 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
817 (pArena->prev->magic != ARENA_FREE_MAGIC))
818 {
819 DPRINT("Heap %p: prev arena %p invalid for %p\n",
820 subheap->heap, pArena->prev, pArena);
821 return FALSE;
822 }
823 /* Check that next block has PREV_FREE flag */
824 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
825 {
826 if (!(*(PULONG)((char *)(pArena + 1) +
827 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
828 {
829 DPRINT("Heap %p: free arena %p next block has no PREV_FREE flag\n",
830 subheap->heap, pArena);
831 return FALSE;
832 }
833 /* Check next block back pointer */
834 if (*((ARENA_FREE **)((char *)(pArena + 1) +
835 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
836 {
837 DPRINT("Heap %p: arena %p has wrong back ptr %lx\n",
838 subheap->heap, pArena,
839 *((PULONG)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
840 return FALSE;
841 }
842 }
843 return TRUE;
844 }
845
846
847 /***********************************************************************
848 * HEAP_ValidateInUseArena
849 */
850 static BOOLEAN HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOLEAN quiet )
851 {
852 char *heapEnd = (char *)subheap + subheap->size;
853
854 /* Check magic number */
855 if (pArena->magic != ARENA_INUSE_MAGIC)
856 {
857 if (quiet == NOISY)
858 {
859 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
860 subheap->heap, pArena);
861 if (TRACE_ON(heap))
862 HEAP_Dump( subheap->heap );
863 }
864 else if (WARN_ON(heap))
865 {
866 DPRINT("Heap %p: invalid in-use arena magic for %p\n",
867 subheap->heap, pArena);
868 if (TRACE_ON(heap))
869 HEAP_Dump( subheap->heap );
870 }
871 return FALSE;
872 }
873 /* Check size flags */
874 if (pArena->size & ARENA_FLAG_FREE)
875 {
876 DPRINT("Heap %p: bad flags %lx for in-use arena %p\n",
877 subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena);
878 return FALSE;
879 }
880 /* Check arena size */
881 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
882 {
883 DPRINT("Heap %p: bad size %08lx for in-use arena %p\n",
884 subheap->heap, (ULONG)pArena->size & ARENA_SIZE_MASK, pArena);
885 return FALSE;
886 }
887 /* Check next arena PREV_FREE flag */
888 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
889 (*(PULONG)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
890 {
891 DPRINT("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
892 subheap->heap, pArena);
893 return FALSE;
894 }
895 /* Check prev free arena */
896 if (pArena->size & ARENA_FLAG_PREV_FREE)
897 {
898 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
899 /* Check prev pointer */
900 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
901 {
902 DPRINT("Heap %p: bad back ptr %p for arena %p\n",
903 subheap->heap, pPrev, pArena );
904 return FALSE;
905 }
906 /* Check that prev arena is free */
907 if (!(pPrev->size & ARENA_FLAG_FREE) ||
908 (pPrev->magic != ARENA_FREE_MAGIC))
909 {
910 DPRINT("Heap %p: prev arena %p invalid for in-use %p\n",
911 subheap->heap, pPrev, pArena);
912 return FALSE;
913 }
914 /* Check that prev arena is really the previous block */
915 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
916 {
917 DPRINT("Heap %p: prev arena %p is not prev for in-use %p\n",
918 subheap->heap, pPrev, pArena );
919 return FALSE;
920 }
921 }
922 return TRUE;
923 }
924
925
926 /***********************************************************************
927 * HEAP_IsInsideHeap
928 * Checks whether the pointer points to a block inside a given heap.
929 *
930 * NOTES
931 * Should this return BOOL32?
932 *
933 * RETURNS
934 * !0: Success
935 * 0: Failure
936 */
937 int HEAP_IsInsideHeap(
938 HANDLE heap, /* [in] Heap */
939 ULONG flags, /* [in] Flags */
940 PVOID ptr /* [in] Pointer */
941 )
942 {
943 HEAP *heapPtr = HEAP_GetPtr( heap );
944 SUBHEAP *subheap;
945 int ret;
946
947 /* Validate the parameters */
948
949 if (!heapPtr)
950 return 0;
951 flags |= heapPtr->flags;
952 if (!(flags & HEAP_NO_SERIALIZE))
953 RtlEnterHeapLock( &heapPtr->critSection );
954 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
955 (((char *)ptr >= (char *)subheap + subheap->headerSize
956 + sizeof(ARENA_INUSE))));
957 if (!(flags & HEAP_NO_SERIALIZE))
958 RtlLeaveHeapLock( &heapPtr->critSection );
959 return ret;
960 }
961
962
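/***********************************************************************
 * DumpStackFrames
 *
 * Walk the saved EBP frame chain and print up to FrameCount return
 * addresses. If Frame is NULL, start from the current frame (x86 only).
 */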
963 void DumpStackFrames ( PULONG Frame, ULONG FrameCount )
964 {
965 ULONG i=0;
966
967 DbgPrint("Frames: ");
968 if ( !Frame )
969 {
970 #if defined __GNUC__
971 __asm__("mov %%ebp, %0" : "=r" (Frame) : );
972 #elif defined(_MSC_VER)
973 __asm mov [Frame], ebp
974 #endif
975 Frame = (PULONG)Frame[0]; // step out of DumpStackFrames
976 }
977 while ( Frame != 0 && (ULONG)Frame != 0xDEADBEEF && (ULONG)Frame != 0xcdcdcdcd && (ULONG)Frame != 0xcccccccc && i++ < FrameCount )
978 {
979 DbgPrint("<%p>", (PVOID)Frame[1]);
980 if (Frame[1] == 0xdeadbeef)
981 break;
982 Frame = (PULONG)Frame[0];
983 DbgPrint(" ");
984 }
985 DbgPrint("\n");
986 }
987
988 /***********************************************************************
989 * HEAP_IsRealArena [Internal]
990 * Validates a block is a valid arena.
991 *
992 * RETURNS
993 * TRUE: Success
994 * FALSE: Failure
995 */
996 static BOOLEAN HEAP_IsRealArena(
997 HANDLE heap, /* [in] Handle to the heap */
998 ULONG flags, /* [in] Bit flags that control access during operation */
999 PVOID block, /* [in] Optional pointer to memory block to validate */
1000 BOOLEAN quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
1001 * does not complain */
1002 )
1003 {
1004 SUBHEAP *subheap;
1005 HEAP *heapPtr = (HEAP *)(heap);
1006 BOOLEAN ret = TRUE;
1007
1008 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
1009 {
1010 DPRINT("Invalid heap %p!\n", heap );
1011 return FALSE;
1012 }
1013
1014 flags &= HEAP_NO_SERIALIZE;
1015 flags |= heapPtr->flags;
1016 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
1017 if (!(flags & HEAP_NO_SERIALIZE))
1018 RtlEnterHeapLock( &heapPtr->critSection );
1019
1020 if (block)
1021 {
1022 /* Only check this single memory block */
1023
1024 /* The following code is really HEAP_IsInsideHeap *
1025 * with serialization already done. */
1026 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
1027 ((char *)block < (char *)subheap + subheap->headerSize
1028 + sizeof(ARENA_INUSE)))
1029 {
1030 if (quiet == NOISY)
1031 {
1032 DPRINT("Heap %p: block %p is not inside heap\n",
1033 heap, block );
1034 DumpStackFrames(NULL,10);
1035 }
1036 else if (WARN_ON(heap))
1037 {
1038 DPRINT1("Heap %p: block %p is not inside heap\n",
1039 heap, block );
1040 DumpStackFrames(NULL,10);
1041 }
1042 ret = FALSE;
1043 }
1044 else
1045 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1046
1047 if (!(flags & HEAP_NO_SERIALIZE))
1048 RtlLeaveHeapLock( &heapPtr->critSection );
1049 return ret;
1050 }
1051
1052 subheap = &heapPtr->subheap;
1053 while (subheap && ret)
1054 {
1055 char *ptr = (char *)subheap + subheap->headerSize;
1056 while (ptr < (char *)subheap + subheap->size)
1057 {
1058 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1059 {
1060 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1061 {
1062 ret = FALSE;
1063 break;
1064 }
1065 ptr += sizeof(ARENA_FREE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1066 }
1067 else
1068 {
1069 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1070 {
1071 ret = FALSE;
1072 break;
1073 }
1074 ptr += sizeof(ARENA_INUSE) + (*(PULONG)ptr & ARENA_SIZE_MASK);
1075 }
1076 }
1077 subheap = subheap->next;
1078 }
1079
1080 if (!(flags & HEAP_NO_SERIALIZE))
1081 RtlLeaveHeapLock( &heapPtr->critSection );
1082 return ret;
1083 }
1084
1085
1086 /***********************************************************************
1087 * HeapCreate (KERNEL32.336)
1088 * RETURNS
1089 * Handle of heap: Success
1090 * NULL: Failure
1091 *
1092 * @implemented
1093 */
1094 HANDLE NTAPI
1095 RtlCreateHeap(ULONG flags,
1096 PVOID BaseAddress,
1097 SIZE_T maxSize,
1098 SIZE_T initialSize,
1099 PVOID Lock,
1100 PRTL_HEAP_PARAMETERS Parameters)
1101 {
1102 SUBHEAP *subheap;
1103 HEAP *heapPtr;
1104
1105 /* Allocate the heap block */
1106
1107 if (!maxSize)
1108 {
1109 maxSize = HEAP_DEF_SIZE;
1110 flags |= HEAP_GROWABLE;
1111 }
1112 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize,
1113 maxSize, Parameters )))
1114 {
1115 return 0;
1116 }
1117
1118 if (RtlpGetMode() == UserMode)
1119 {
1120 /* link it into the per-process heap list */
1121 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1122
1123 heapPtr = subheap->heap;
1124 heapPtr->next = (HEAP*)NtCurrentPeb()->ProcessHeaps;
1125 NtCurrentPeb()->ProcessHeaps = (HANDLE)heapPtr;
1126 NtCurrentPeb()->NumberOfHeaps++;
1127
1128 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1129 }
1130
1131 return (HANDLE)subheap;
1132 }
1133
1134 /***********************************************************************
1135 * HeapDestroy (KERNEL32.337)
1136 * RETURNS
1137 * TRUE: Success
1138 * FALSE: Failure
1139 *
1140 * @implemented
1141 *
1142 * RETURNS
1143 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1144 * Failure: The Heap handle, if heap is the process heap.
1145 */
1146 HANDLE NTAPI
1147 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1148 {
1149 HEAP *heapPtr = HEAP_GetPtr( heap );
1150 SUBHEAP *subheap;
1151 ULONG flags;
1152 HEAP **pptr;
1153
1154 DPRINT("%p\n", heap );
1155 if (!heapPtr)
1156 return heap;
1157
1158 if (RtlpGetMode() == UserMode)
1159 {
1160 if (heap == NtCurrentPeb()->ProcessHeap)
1161 return heap; /* cannot delete the main process heap */
1162
1163 /* remove it from the per-process list */
1164 RtlEnterHeapLock (&RtlpProcessHeapsListLock);
1165
1166 pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps;
1167 while (*pptr && *pptr != heapPtr) pptr = &(*pptr)->next;
1168 if (*pptr) *pptr = (*pptr)->next;
1169 NtCurrentPeb()->NumberOfHeaps--;
1170
1171 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1172 }
1173
1174 RtlDeleteHeapLock( &heapPtr->critSection );
1175 subheap = &heapPtr->subheap;
1176 // We must save the flags. The first subheap is located after
1177 // the heap structure. If we release the first subheap,
1178 // we release also the heap structure.
1179 flags = heapPtr->flags;
1180 while (subheap)
1181 {
1182 SUBHEAP *next = subheap->next;
1183 ULONG dummySize = 0;
1184 ZwFreeVirtualMemory(NtCurrentProcess(),
1185 (PVOID*)&subheap,
1186 &dummySize,
1187 MEM_RELEASE);
1188 subheap = next;
1189 }
1190 return (HANDLE)NULL;
1191 }
1192
1193
1194 /***********************************************************************
1195 * HeapAlloc (KERNEL32.334)
1196 * RETURNS
1197 * Pointer to allocated memory block
1198 * NULL: Failure
1199 * 0x7d030f60--invalid flags in RtlHeapAllocate
1200 * @implemented
1201 */
1202 PVOID NTAPI
1203 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1204 ULONG flags, /* [in] Heap allocation control flags */
1205 ULONG size) /* [in] Number of bytes to allocate */
1206 {
1207 ARENA_FREE *pArena;
1208 ARENA_INUSE *pInUse;
1209 SUBHEAP *subheap;
1210 HEAP *heapPtr = HEAP_GetPtr( heap );
1211
1212 /* Validate the parameters */
1213
1214 if (!heapPtr)
1215 {
1216 if (flags & HEAP_GENERATE_EXCEPTIONS)
1217 RtlRaiseStatus( STATUS_NO_MEMORY );
1218 return NULL;
1219 }
1220 //flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1221 flags |= heapPtr->flags;
1222 if (!(flags & HEAP_NO_SERIALIZE))
1223 RtlEnterHeapLock( &heapPtr->critSection );
1224 size = (size + 7) & ~7; /* round the request up to 8-byte granularity */
1225 if (size < HEAP_MIN_BLOCK_SIZE)
1226 size = HEAP_MIN_BLOCK_SIZE;
1227
1228 /* Locate a suitable free block */
1229
1230 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1231 {
1232 DPRINT("(%p,%08lx,%08lx): returning NULL\n",
1233 heap, flags, size );
1234 if (!(flags & HEAP_NO_SERIALIZE))
1235 RtlLeaveHeapLock( &heapPtr->critSection );
1236 if (flags & HEAP_GENERATE_EXCEPTIONS)
1237 RtlRaiseStatus( STATUS_NO_MEMORY );
1238 return NULL;
1239 }
1240
1241 /* Remove the arena from the free list */
1242
1243 pArena->next->prev = pArena->prev;
1244 pArena->prev->next = pArena->next;
1245
1246 /* Build the in-use arena */
1247
1248 pInUse = (ARENA_INUSE *)pArena;
1249 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1250 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1251 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1252 pInUse->magic = ARENA_INUSE_MAGIC;
1253
1254 /* Save user flags */
1255 subheap->UserFlags = flags & HEAP_SETTABLE_USER_FLAGS;
1256
1257 /* Shrink the block */
1258
1259 HEAP_ShrinkBlock( subheap, pInUse, size );
1260
1261 if (flags & HEAP_ZERO_MEMORY)
1262 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1263 else if (TRACE_ON(heap))
1264 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1265
1266 if (!(flags & HEAP_NO_SERIALIZE))
1267 RtlLeaveHeapLock( &heapPtr->critSection );
1268
1269 DPRINT("(%p,%08lx,%08lx): returning %p\n",
1270 heap, flags, size, (PVOID)(pInUse + 1) );
1271 return (PVOID)(pInUse + 1);
1272 }
1273
1274
1275 /***********************************************************************
1276 * HeapFree (KERNEL32.338)
1277 * RETURNS
1278 * TRUE: Success
1279 * FALSE: Failure
1280 *
1281 * @implemented
1282 */
1283 BOOLEAN NTAPI RtlFreeHeap(
1284 HANDLE heap, /* [in] Handle of heap */
1285 ULONG flags, /* [in] Heap freeing flags */
1286 PVOID ptr /* [in] Address of memory to free */
1287 )
1288 {
1289 ARENA_INUSE *pInUse;
1290 SUBHEAP *subheap;
1291 HEAP *heapPtr = HEAP_GetPtr( heap );
1292
1293 /* Validate the parameters */
1294
1295 if (!heapPtr)
1296 return FALSE;
1297 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1298 {
1299 DPRINT("(%p,%08lx,%p): asked to free NULL\n",
1300 heap, flags, ptr );
1301 return TRUE;
1302 }
1303
1304 flags &= HEAP_NO_SERIALIZE;
1305 flags |= heapPtr->flags;
1306 if (!(flags & HEAP_NO_SERIALIZE))
1307 RtlEnterHeapLock( &heapPtr->critSection );
1308 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1309 {
1310 if (!(flags & HEAP_NO_SERIALIZE))
1311 RtlLeaveHeapLock( &heapPtr->critSection );
1312 DPRINT("(%p,%08lx,%p): returning FALSE\n",
1313 heap, flags, ptr );
1314 return FALSE;
1315 }
1316
1317 /* Turn the block into a free block */
1318
1319 pInUse = (ARENA_INUSE *)ptr - 1;
1320 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1321 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1322
1323 if (!(flags & HEAP_NO_SERIALIZE))
1324 RtlLeaveHeapLock( &heapPtr->critSection );
1325
1326 DPRINT("(%p,%08lx,%p): returning TRUE\n",
1327 heap, flags, ptr );
1328 return TRUE;
1329 }
1330
1331
1332 /***********************************************************************
1333 * RtlReAllocateHeap
1334 * PARAMS
1335 * Heap [in] Handle of heap block
1336 * Flags [in] Heap reallocation flags
1337 * Ptr, [in] Address of memory to reallocate
1338 * Size [in] Number of bytes to reallocate
1339 *
1340 * RETURNS
1341 * Pointer to reallocated memory block
1342 * NULL: Failure
1343 * 0x7d030f60--invalid flags in RtlHeapAllocate
1344 * @implemented
1345 */
1346 PVOID NTAPI RtlReAllocateHeap(
1347 HANDLE Heap,
1348 ULONG Flags,
1349 PVOID Ptr,
1350 ULONG Size
1351 )
1352 {
1353 ARENA_INUSE *pArena;
1354 ULONG oldSize;
1355 HEAP *heapPtr;
1356 SUBHEAP *subheap;
1357
1358 if (!Ptr)
1359 return NULL;
1360 if (!(heapPtr = HEAP_GetPtr( Heap )))
1361 return NULL;
1362
1363 /* Validate the parameters */
1364
1365 //Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1366 // HEAP_REALLOC_IN_PLACE_ONLY;
1367 Flags |= heapPtr->flags;
1368 Size = (Size + 7) & ~7; /* round the request up to 8-byte granularity */
1369 if (Size < HEAP_MIN_BLOCK_SIZE)
1370 Size = HEAP_MIN_BLOCK_SIZE;
1371
1372 if (!(Flags & HEAP_NO_SERIALIZE))
1373 RtlEnterHeapLock( &heapPtr->critSection );
1374 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1375 {
1376 if (!(Flags & HEAP_NO_SERIALIZE))
1377 RtlLeaveHeapLock( &heapPtr->critSection );
1378 DPRINT("(%p,%08lx,%p,%08lx): returning NULL\n",
1379 Heap, Flags, Ptr, Size );
1380 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1381 RtlRaiseStatus( STATUS_NO_MEMORY );
1382 return NULL;
1383 }
1384
1385 /* Check if we need to grow the block */
1386
1387 pArena = (ARENA_INUSE *)Ptr - 1;
1388 pArena->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1389
1390 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1391 oldSize = (pArena->size & ARENA_SIZE_MASK);
1392 if (Size > oldSize)
1393 {
1394 char *pNext = (char *)(pArena + 1) + oldSize;
1395 if ((pNext < (char *)subheap + subheap->size) &&
1396 (*(PULONG)pNext & ARENA_FLAG_FREE) &&
1397 (oldSize + (*(PULONG)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= Size))
1398 {
1399 /* The next block is free and large enough */
1400 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1401 pFree->next->prev = pFree->prev;
1402 pFree->prev->next = pFree->next;
1403 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1404 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1405 + Size + HEAP_MIN_BLOCK_SIZE,
1406 heapPtr->flags))
1407 {
1408 if (!(Flags & HEAP_NO_SERIALIZE))
1409 RtlLeaveHeapLock( &heapPtr->critSection );
1410 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1411 RtlRaiseStatus( STATUS_NO_MEMORY );
1412 return NULL;
1413 }
1414 HEAP_ShrinkBlock( subheap, pArena, Size );
1415 }
1416 else /* Do it the hard way */
1417 {
1418 ARENA_FREE *pNew;
1419 ARENA_INUSE *pInUse;
1420 SUBHEAP *newsubheap;
1421
1422 if ((Flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1423 !(pNew = HEAP_FindFreeBlock( heapPtr, Size, &newsubheap )))
1424 {
1425 if (!(Flags & HEAP_NO_SERIALIZE))
1426 RtlLeaveHeapLock( &heapPtr->critSection );
1427 if (Flags & HEAP_GENERATE_EXCEPTIONS)
1428 RtlRaiseStatus( STATUS_NO_MEMORY );
1429 return NULL;
1430 }
1431
1432 /* Build the in-use arena */
1433
1434 pNew->next->prev = pNew->prev;
1435 pNew->prev->next = pNew->next;
1436 pInUse = (ARENA_INUSE *)pNew;
1437 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1438 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1439 pInUse->threadId = (ULONG)NtCurrentTeb()->Cid.UniqueThread;
1440 pInUse->magic = ARENA_INUSE_MAGIC;
1441 HEAP_ShrinkBlock( newsubheap, pInUse, Size );
1442 memcpy( pInUse + 1, pArena + 1, oldSize );
1443
1444 /* Free the previous block */
1445
1446 HEAP_MakeInUseBlockFree( subheap, pArena, Flags );
1447 subheap = newsubheap;
1448 pArena = pInUse;
1449 }
1450 }
1451 else
1452 HEAP_ShrinkBlock( subheap, pArena, Size ); /* Shrink the block */
1453
1454 /* Clear the extra bytes if needed */
1455
1456 if (Size > oldSize)
1457 {
1458 if (Flags & HEAP_ZERO_MEMORY)
1459 memset( (char *)(pArena + 1) + oldSize, 0,
1460 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1461 else if (TRACE_ON(heap))
1462 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1463 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1464 }
1465
1466 /* Return the new arena */
1467
1468 if (!(Flags & HEAP_NO_SERIALIZE))
1469 RtlLeaveHeapLock( &heapPtr->critSection );
1470
1471 DPRINT("(%p,%08lx,%p,%08lx): returning %p\n",
1472 Heap, Flags, Ptr, Size, (PVOID)(pArena + 1) );
1473 return (PVOID)(pArena + 1);
1474 }
1475
1476
1477 /***********************************************************************
1478 * RtlCompactHeap
1479 *
1480 * @unimplemented
1481 */
1482 ULONG NTAPI
1483 RtlCompactHeap(HANDLE Heap,
1484 ULONG Flags)
1485 {
1486 UNIMPLEMENTED;
1487 return 0;
1488 }
1489
1490
1491 /***********************************************************************
1492 * RtlLockHeap
1493 * Attempts to acquire the critical section object for a specified heap.
1494 *
1495 * PARAMS
1496 * Heap [in] Handle of heap to lock for exclusive access
1497 *
1498 * RETURNS
1499 * TRUE: Success
1500 * FALSE: Failure
1501 *
1502 * @implemented
1503 */
1504 BOOLEAN NTAPI
1505 RtlLockHeap(IN HANDLE Heap)
1506 {
1507 HEAP *heapPtr = HEAP_GetPtr( Heap );
1508 if (!heapPtr)
1509 return FALSE;
1510 RtlEnterHeapLock( &heapPtr->critSection );
1511 return TRUE;
1512 }
1513
1514
1515 /***********************************************************************
1516 * RtlUnlockHeap
1517 * Releases ownership of the critical section object.
1518 *
1519 * PARAMS
1520 * Heap [in] Handle to the heap to unlock
1521 *
1522 * RETURNS
1523 * TRUE: Success
1524 * FALSE: Failure
1525 *
1526 * @implemented
1527 */
1528 BOOLEAN NTAPI
1529 RtlUnlockHeap(HANDLE Heap)
1530 {
1531 HEAP *heapPtr = HEAP_GetPtr( Heap );
1532 if (!heapPtr)
1533 return FALSE;
1534 RtlLeaveHeapLock( &heapPtr->critSection );
1535 return TRUE;
1536 }
1537
1538
1539 /***********************************************************************
1540 * RtlSizeHeap
1541 * PARAMS
1542 * Heap [in] Handle of heap
1543 * Flags [in] Heap size control flags
1544 * Ptr [in] Address of memory to return size for
1545 *
1546 * RETURNS
1547 * Size in bytes of allocated memory
1548 * 0xffffffff: Failure
1549 *
1550 * @implemented
1551 */
1552 ULONG NTAPI
1553 RtlSizeHeap(
1554 HANDLE Heap,
1555 ULONG Flags,
1556 PVOID Ptr
1557 )
1558 {
1559 ULONG ret;
1560 HEAP *heapPtr = HEAP_GetPtr( Heap );
1561
1562 if (!heapPtr)
1563 return 0;
1564 Flags &= HEAP_NO_SERIALIZE;
1565 Flags |= heapPtr->flags;
1566 if (!(Flags & HEAP_NO_SERIALIZE))
1567 RtlEnterHeapLock( &heapPtr->critSection );
1568 if (!HEAP_IsRealArena( Heap, HEAP_NO_SERIALIZE, Ptr, QUIET ))
1569 {
1570 ret = 0xffffffff;
1571 }
1572 else
1573 {
1574 ARENA_INUSE *pArena = (ARENA_INUSE *)Ptr - 1;
1575 ret = pArena->size & ARENA_SIZE_MASK;
1576 }
1577 if (!(Flags & HEAP_NO_SERIALIZE))
1578 RtlLeaveHeapLock( &heapPtr->critSection );
1579
1580 DPRINT("(%p,%08lx,%p): returning %08lx\n",
1581 Heap, Flags, Ptr, ret );
1582 return ret;
1583 }
1584
1585
1586 /***********************************************************************
1587 * RtlValidateHeap
1588 * Validates a specified heap.
1589 *
1590 * PARAMS
1591 * Heap [in] Handle to the heap
1592 * Flags [in] Bit flags that control access during operation
1593 * Block [in] Optional pointer to memory block to validate
1594 *
1595 * NOTES
1596 * Flags is ignored.
1597 *
1598 * RETURNS
1599 * TRUE: Success
1600 * FALSE: Failure
1601 *
1602 * @implemented
1603 */
1604 BOOLEAN NTAPI RtlValidateHeap(
1605 HANDLE Heap,
1606 ULONG Flags,
1607 PVOID Block
1608 )
1609 {
1610 HEAP *heapPtr = HEAP_GetPtr( Heap );
1611 if (!heapPtr)
1612 return FALSE;
1613 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1614 }
1615
1616
1617 /***********************************************************************
1618 * HeapWalk (KERNEL32.344)
1619 * Enumerates the memory blocks in a specified heap.
1620 * See HEAP_Dump() for info on heap structure.
1621 *
1622 * TODO
1623 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1624 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1625 *
1626 * RETURNS
1627 * TRUE: Success
1628 * FALSE: Failure
1629 */
1630 #if 0
1631 BOOLEAN NTAPI HeapWalk(
1632 HANDLE heap, /* [in] Handle to heap to enumerate */
1633 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1634 )
1635 {
1636 HEAP *heapPtr = HEAP_GetPtr(heap);
1637 SUBHEAP *sub, *currentheap = NULL;
1638 BOOLEAN ret = FALSE;
1639 char *ptr;
1640 int region_index = 0;
1641
1642 if (!heapPtr || !entry)
1643 {
1644 return FALSE;
1645 }
1646
1647 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1648 RtlEnterHeapLock( &heapPtr->critSection );
1649
1650 /* set ptr to the next arena to be examined */
1651
1652 if (!entry->lpData) /* first call (init) ? */
1653 {
1654 DPRINT("begin walking of heap 0x%08x.\n", heap);
1655 /*HEAP_Dump(heapPtr);*/
1656 currentheap = &heapPtr->subheap;
1657 ptr = (char*)currentheap + currentheap->headerSize;
1658 }
1659 else
1660 {
1661 ptr = entry->lpData;
1662 sub = &heapPtr->subheap;
1663 while (sub)
1664 {
1665 if (((char *)ptr >= (char *)sub) &&
1666 ((char *)ptr < (char *)sub + sub->size))
1667 {
1668 currentheap = sub;
1669 break;
1670 }
1671 sub = sub->next;
1672 region_index++;
1673 }
1674 if (currentheap == NULL)
1675 {
1676 DPRINT("no matching subheap found, shouldn't happen !\n");
1677 goto HW_end;
1678 }
1679
1680 ptr += entry->cbData; /* point to next arena */
1681 if (ptr > (char *)currentheap + currentheap->size - 1)
1682 { /* proceed with next subheap */
1683 if (!(currentheap = currentheap->next))
1684 { /* successfully finished */
1685 DPRINT("end reached.\n");
1686 goto HW_end;
1687 }
1688 ptr = (char*)currentheap + currentheap->headerSize;
1689 }
1690 }
1691
1692 entry->wFlags = 0;
1693 if (*(PULONG)ptr & ARENA_FLAG_FREE)
1694 {
1695 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1696
1697 /*DPRINT("free, magic: %04x\n", pArena->magic);*/
1698
1699 entry->lpData = pArena + 1;
1700 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1701 entry->cbOverhead = sizeof(ARENA_FREE);
1702 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1703 }
1704 else
1705 {
1706 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1707
1708 /*DPRINT("busy, magic: %04x\n", pArena->magic);*/
1709
1710 entry->lpData = pArena + 1;
1711 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1712 entry->cbOverhead = sizeof(ARENA_INUSE);
1713 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1714 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1715 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1716 }
1717
1718 entry->iRegionIndex = region_index;
1719
1720 /* first element of heap ? */
1721 if (ptr == (char *)currentheap + currentheap->headerSize)
1722 {
1723 entry->wFlags |= PROCESS_HEAP_REGION;
1724 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1725 entry->Foo.Region.dwUnCommittedSize =
1726 currentheap->size - currentheap->commitSize;
1727 entry->Foo.Region.lpFirstBlock = /* first valid block */
1728 (char *)currentheap + currentheap->headerSize;
1729 entry->Foo.Region.lpLastBlock = /* first invalid block */
1730 (char *)currentheap + currentheap->size;
1731 }
1732 ret = TRUE;
1733
1734 HW_end:
1735 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1736 RtlLeaveHeapLock( &heapPtr->critSection );
1737
1738 return ret;
1739 }
1740 #endif
1741
1742
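/***********************************************************************
 * RtlInitializeHeapManager
 *
 * Reset the PEB heap bookkeeping and initialize the lock that protects
 * the per-process heap list.
 */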
1743 VOID
1744 RtlInitializeHeapManager(VOID)
1745 {
1746 PPEB Peb;
1747
1748 Peb = NtCurrentPeb();
1749
1750 Peb->NumberOfHeaps = 0;
1751 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1752 Peb->ProcessHeaps = NULL;
1753
1754 RtlInitializeHeapLock(&RtlpProcessHeapsListLock);
1755 }
1756
1757
1758 /*
1759 * @implemented
1760 */
1761 NTSTATUS NTAPI
1762 RtlEnumProcessHeaps(PHEAP_ENUMERATION_ROUTINE HeapEnumerationRoutine,
1763 PVOID lParam)
1764 {
1765 NTSTATUS Status = STATUS_SUCCESS;
1766 HEAP** pptr;
1767
1768 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1769
1770 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1771 {
1772 Status = HeapEnumerationRoutine(*pptr,lParam);
1773 if (!NT_SUCCESS(Status))
1774 break;
1775 }
1776
1777 RtlLeaveHeapLock(&RtlpProcessHeapsListLock);
1778
1779 return Status;
1780 }
1781
1782
1783 /*
1784 * @implemented
1785 */
1786 ULONG NTAPI
1787 RtlGetProcessHeaps(ULONG HeapCount,
1788 HANDLE *HeapArray)
1789 {
1790 ULONG Result = 0;
1791 HEAP ** pptr;
1792
1793 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1794
1795 Result = NtCurrentPeb()->NumberOfHeaps;
1796
1797 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1798 {
1799 int i = 0;
1800 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1801 {
1802 HeapArray[i++] = *pptr;
1803 }
1804 }
1805
1806 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1807
1808 return Result;
1809 }
1810
1811
1812 /*
1813 * @implemented
1814 */
1815 BOOLEAN NTAPI
1816 RtlValidateProcessHeaps(VOID)
1817 {
1818 BOOLEAN Result = TRUE;
1819 HEAP ** pptr;
1820
1821 RtlEnterHeapLock(&RtlpProcessHeapsListLock);
1822
1823 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr = &(*pptr)->next)
1824 {
1825 if (!RtlValidateHeap(*pptr, 0, NULL))
1826 {
1827 Result = FALSE;
1828 break;
1829 }
1830 }
1831
1832 RtlLeaveHeapLock (&RtlpProcessHeapsListLock);
1833
1834 return Result;
1835 }
1836
1837
1838 /*
1839 * @unimplemented
1840 */
1841 BOOLEAN NTAPI
1842 RtlZeroHeap(
1843 IN PVOID HeapHandle,
1844 IN ULONG Flags
1845 )
1846 {
1847 UNIMPLEMENTED;
1848 return FALSE;
1849 }
1850
1851 /*
1852 * @unimplemented
1853 */
1854 BOOLEAN
1855 NTAPI
1856 RtlSetUserValueHeap(IN PVOID HeapHandle,
1857 IN ULONG Flags,
1858 IN PVOID BaseAddress,
1859 IN PVOID UserValue)
1860 {
1861 HEAP *heapPtr = HEAP_GetPtr(HeapHandle);
1862 ARENA_INUSE *pInUse;
1863 SUBHEAP *subheap;
1864
1865 /* Get the subheap */
1866 pInUse = (ARENA_INUSE *)BaseAddress - 1;
1867 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1868
1869 /* Hack: the user value is stored on the sub-heap, not per block, so all allocations in this sub-heap share it */
1870 subheap->UserValue = UserValue;
1871 return TRUE;
1872 }
1873
1874 /*
1875 * @unimplemented
1876 */
1877 BOOLEAN
1878 NTAPI
1879 RtlGetUserInfoHeap(IN PVOID HeapHandle,
1880 IN ULONG Flags,
1881 IN PVOID BaseAddress,
1882 OUT PVOID *UserValue,
1883 OUT PULONG UserFlags)
1884 {
1885 HEAP *heapPtr = HEAP_GetPtr(HeapHandle);
1886 ARENA_INUSE *pInUse;
1887 SUBHEAP *subheap;
1888
1889 /* Get the subheap */
1890 pInUse = (ARENA_INUSE *)BaseAddress - 1;
1891 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1892
1893 /* Hack: these are the per-sub-heap values saved above, not per-block data */
1894 DPRINT1("V/F: %p %lx\n", subheap->UserValue, subheap->UserFlags);
1895 if (UserValue) *UserValue = subheap->UserValue;
1896 if (UserFlags) *UserFlags = subheap->UserFlags;
1897 return TRUE;
1898 }
1899
1900 /*
1901 * @unimplemented
1902 */
1903 NTSTATUS
1904 NTAPI
1905 RtlUsageHeap(IN HANDLE Heap,
1906 IN ULONG Flags,
1907 OUT PRTL_HEAP_USAGE Usage)
1908 {
1909 /* TODO */
1910 UNIMPLEMENTED;
1911 return STATUS_NOT_IMPLEMENTED;
1912 }
1913
1914 PWSTR
1915 NTAPI
1916 RtlQueryTagHeap(IN PVOID HeapHandle,
1917 IN ULONG Flags,
1918 IN USHORT TagIndex,
1919 IN BOOLEAN ResetCounters,
1920 OUT PRTL_HEAP_TAG_INFO HeapTagInfo)
1921 {
1922 /* TODO */
1923 UNIMPLEMENTED;
1924 return NULL;
1925 }
1926
1927 ULONG
1928 NTAPI
1929 RtlExtendHeap(IN HANDLE Heap,
1930 IN ULONG Flags,
1931 IN PVOID P,
1932 IN ULONG Size)
1933 {
1934 /* TODO */
1935 UNIMPLEMENTED;
1936 return 0;
1937 }
1938
1939 ULONG
1940 NTAPI
1941 RtlCreateTagHeap(IN HANDLE HeapHandle,
1942 IN ULONG Flags,
1943 IN PWSTR TagName,
1944 IN PWSTR TagSubName)
1945 {
1946 /* TODO */
1947 UNIMPLEMENTED;
1948 return 0;
1949 }
1950
1951 /* EOF */