- RtlReAllocateHeap shouldn't allocate memory if ptr == NULL.
1 /*
2 * Win32 heap functions
3 *
4 * Copyright 1996 Alexandre Julliard
5 * Copyright 1998 Ulrich Weigand
6 */
7
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 #include <string.h>
16 #include <ddk/ntddk.h>
17 #include <ntdll/rtl.h>
18 #include <ntos/heap.h>
19 #include <ntos/minmax.h>
20
21 #define NDEBUG
22 #include <debug.h>
23
24 #define DPRINTF DPRINT
25 #define ERR DPRINT
26 #define SetLastError(x)
27 #define WARN DPRINT
28 #define TRACE DPRINT
29 #define WARN_ON(x) (1)
30
31 #undef assert
32 #ifdef NDEBUG
33 #define TRACE_ON(x) (0)
34 #define assert(x)
35 #else
36 #define TRACE_ON(x) (1)
37 #define assert(x)
38 #endif
39
40
41 static CRITICAL_SECTION RtlpProcessHeapsListLock;
42
43
44 typedef struct tagARENA_INUSE
45 {
46 DWORD size; /* Block size; must be the first field */
47 WORD threadId; /* Allocating thread id */
48 WORD magic; /* Magic number */
49 }
50 ARENA_INUSE;
51
52 typedef struct tagARENA_FREE
53 {
54 DWORD size; /* Block size; must be the first field */
55 WORD threadId; /* Freeing thread id */
56 WORD magic; /* Magic number */
57 struct tagARENA_FREE *next; /* Next free arena */
58 struct tagARENA_FREE *prev; /* Prev free arena */
59 }
60 ARENA_FREE;
61
62 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
63 #define ARENA_FLAG_PREV_FREE 0x00000002
64 #define ARENA_SIZE_MASK 0xfffffffc
65 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
66 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
67
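/* Illustrative sketch (not part of the heap implementation; the helper name
 * is invented): the low two bits of an arena's 'size' field carry the
 * ARENA_FLAG_* bits, so the field must be masked with ARENA_SIZE_MASK before
 * it is used as a byte count.
 */
#if 0
static DWORD HEAP_ExampleArenaSize( DWORD sizeField, DWORD *pFlags )
{
    if (pFlags)
        *pFlags = sizeField & ~ARENA_SIZE_MASK; /* ARENA_FLAG_FREE / ARENA_FLAG_PREV_FREE */
    return sizeField & ARENA_SIZE_MASK;         /* usable block size in bytes */
}
#endif
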
68 #define ARENA_INUSE_FILLER 0x55
69 #define ARENA_FREE_FILLER 0xaa
70
71 #define QUIET 1 /* Suppress messages */
72 #define NOISY 0 /* Report all errors */
73
74 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
75
76 /* Max size of the blocks on the free lists */
77 static const DWORD HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
78 {
79 0x20, 0x80, 0x200, 0xffffffff
80 };
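
/* Illustrative sketch (helper name invented, not part of this file): how a
 * block size maps onto one of the four free lists. HEAP_InsertFreeBlock and
 * HEAP_FindFreeBlock below walk this table until they reach the first entry
 * whose threshold is >= the size; the 0xffffffff entry catches everything.
 */
#if 0
static int HEAP_ExampleFreeListIndex( DWORD size )
{
    int i = 0;
    while (HEAP_freeListSizes[i] < size)
        i++;                     /* e.g. 0x18 -> list 0, 0x100 -> list 2 */
    return i;
}
#endif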
81
82 typedef struct
83 {
84 DWORD size;
85 ARENA_FREE arena;
86 }
87 FREE_LIST_ENTRY;
88
89 struct tagHEAP;
90
91 typedef struct tagSUBHEAP
92 {
93 DWORD size; /* Size of the whole sub-heap */
94 DWORD commitSize; /* Committed size of the sub-heap */
95 DWORD headerSize; /* Size of the heap header */
96 struct tagSUBHEAP *next; /* Next sub-heap */
97 struct tagHEAP *heap; /* Main heap structure */
98 DWORD magic; /* Magic number */
99 }
100 SUBHEAP, *PSUBHEAP;
101
102 #define SUBHEAP_MAGIC ((DWORD)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
103
104 typedef struct tagHEAP
105 {
106 SUBHEAP subheap; /* First sub-heap */
107 struct tagHEAP *next; /* Next heap for this process */
108 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
109 CRITICAL_SECTION critSection; /* Critical section for serialization */
110 DWORD flags; /* Heap flags */
111 DWORD magic; /* Magic number */
112 BYTE filler[4]; /* Make multiple of 8 bytes */
113 }
114 HEAP, *PHEAP;
115
116 #define HEAP_MAGIC ((DWORD)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
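
/* Rough picture of the layout implied by the structures above (illustrative
 * only; the helper name is invented). The HEAP header doubles as its first
 * SUBHEAP because 'subheap' is the first member, so a HEAP* and its primary
 * SUBHEAP* refer to the same address:
 *
 *   |HEAP (or SUBHEAP) header|arena|block|arena|block| ... |subheap->size
 *                            ^
 *                            (char *)subheap + subheap->headerSize
 */
#if 0
static void *HEAP_ExampleFirstArena( SUBHEAP *subheap )
{
    /* the first arena sits right behind the header: sizeof(HEAP) for the
     * primary sub-heap, sizeof(SUBHEAP) for secondary ones */
    return (char *)subheap + subheap->headerSize;
}
#endif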
117
118 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
119 #define HEAP_MIN_BLOCK_SIZE (sizeof(ARENA_FREE) + 8) /* Min. heap block size */
120 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
121
122
123 static BOOL HEAP_IsRealArena( HANDLE heap, DWORD flags, LPCVOID block, BOOL quiet );
124
125
126 /***********************************************************************
127 * HEAP_Dump
128 */
129 void
130 HEAP_Dump(PHEAP heap)
131 {
132 int i;
133 SUBHEAP *subheap;
134 char *ptr;
135
136 DPRINTF( "Heap: %08lx\n", (DWORD)heap );
137 DPRINTF( "Next: %08lx Sub-heaps: %08lx",
138 (DWORD)heap->next, (DWORD)&heap->subheap );
139 subheap = &heap->subheap;
140 while (subheap->next)
141 {
142 DPRINTF( " -> %08lx", (DWORD)subheap->next );
143 subheap = subheap->next;
144 }
145
146 DPRINTF( "\nFree lists:\n Block Stat Size Id\n" );
147 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
148 DPRINTF( "%08lx free %08lx %04x prev=%08lx next=%08lx\n",
149 (DWORD)&heap->freeList[i].arena, heap->freeList[i].arena.size,
150 heap->freeList[i].arena.threadId,
151 (DWORD)heap->freeList[i].arena.prev,
152 (DWORD)heap->freeList[i].arena.next );
153
154 subheap = &heap->subheap;
155 while (subheap)
156 {
157 DWORD freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
158 DPRINTF( "\n\nSub-heap %08lx: size=%08lx committed=%08lx\n",
159 (DWORD)subheap, subheap->size, subheap->commitSize );
160
161 DPRINTF( "\n Block Stat Size Id\n" );
162 ptr = (char*)subheap + subheap->headerSize;
163 while (ptr < (char *)subheap + subheap->size)
164 {
165 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
166 {
167 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
168 DPRINTF( "%08lx free %08lx %04x prev=%08lx next=%08lx\n",
169 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
170 pArena->threadId, (DWORD)pArena->prev,
171 (DWORD)pArena->next);
172 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
173 arenaSize += sizeof(ARENA_FREE);
174 freeSize += pArena->size & ARENA_SIZE_MASK;
175 }
176 else if (*(DWORD *)ptr & ARENA_FLAG_PREV_FREE)
177 {
178 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
179 DPRINTF( "%08lx Used %08lx %04x back=%08lx\n",
180 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
181 pArena->threadId, *((DWORD *)pArena - 1));
182 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
183 arenaSize += sizeof(ARENA_INUSE);
184 usedSize += pArena->size & ARENA_SIZE_MASK;
185 }
186 else
187 {
188 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
189 DPRINTF( "%08lx used %08lx %04x\n",
190 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
191 pArena->threadId);
192 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
193 arenaSize += sizeof(ARENA_INUSE);
194 usedSize += pArena->size & ARENA_SIZE_MASK;
195 }
196 }
197 DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
198 subheap->size, subheap->commitSize, freeSize, usedSize,
199 arenaSize, (arenaSize * 100) / subheap->size );
200 subheap = subheap->next;
201 }
202 }
203
204
205 /***********************************************************************
206 * HEAP_GetPtr
207 * RETURNS
208 * Pointer to the heap
209 * NULL: Failure
210 */
211 static PHEAP
212 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
213 {
214 HEAP *heapPtr = (HEAP *)heap;
215 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
216 {
217 ERR("Invalid heap %08x!\n", heap );
218 return NULL;
219 }
220 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
221 {
222 HEAP_Dump( heapPtr );
223 assert( FALSE );
224 return NULL;
225 }
226 return heapPtr;
227 }
228
229
230 /***********************************************************************
231 * HEAP_InsertFreeBlock
232 *
233 * Insert a free block into the free list.
234 */
235 static VOID
236 HEAP_InsertFreeBlock(PHEAP heap,
237 ARENA_FREE *pArena,
238 BOOL last)
239 {
240 FREE_LIST_ENTRY *pEntry = heap->freeList;
241 while (pEntry->size < pArena->size)
242 pEntry++;
243 if (last)
244 {
245 /* insert at end of free list, i.e. before next free list entry */
246 pEntry++;
247 if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS])
248 {
249 pEntry = heap->freeList;
250 }
251 pArena->prev = pEntry->arena.prev;
252 pArena->prev->next = pArena;
253 pArena->next = &pEntry->arena;
254 pEntry->arena.prev = pArena;
255 }
256 else
257 {
258 /* insert at head of free list */
259 pArena->next = pEntry->arena.next;
260 pArena->next->prev = pArena;
261 pArena->prev = &pEntry->arena;
262 pEntry->arena.next = pArena;
263 }
264 pArena->size |= ARENA_FLAG_FREE;
265 }
266
267
268 /***********************************************************************
269 * HEAP_FindSubHeap
270 * Find the sub-heap containing a given address.
271 *
272 * RETURNS
273 * Pointer: Success
274 * NULL: Failure
275 */
276 static PSUBHEAP
277 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
278 LPCVOID ptr) /* [in] Address */
279 {
280 SUBHEAP *sub = &heap->subheap;
281 while (sub)
282 {
283 if (((char *)ptr >= (char *)sub) &&
284 ((char *)ptr < (char *)sub + sub->size))
285 return sub;
286 sub = sub->next;
287 }
288 return NULL;
289 }
290
291
292 /***********************************************************************
293 * HEAP_Commit
294 *
295 * Make sure the heap storage is committed up to (not including) ptr.
296 */
297 static inline BOOL
298 HEAP_Commit(SUBHEAP *subheap,
299 void *ptr,
300 DWORD flags)
301 {
302 DWORD size = (DWORD)((char *)ptr - (char *)subheap);
303 NTSTATUS Status;
304 PVOID address;
305 ULONG commitsize;
306
307 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
308 if (size > subheap->size)
309 size = subheap->size;
310 if (size <= subheap->commitSize)
311 return TRUE;
312
313 address = (PVOID)((char *)subheap + subheap->commitSize);
314 commitsize = size - subheap->commitSize;
315
316 if (!(flags & HEAP_NO_VALLOC))
317 {
318 Status = NtAllocateVirtualMemory(NtCurrentProcess(),
319 &address,
320 0,
321 &commitsize,
322 MEM_COMMIT,
323 PAGE_EXECUTE_READWRITE);
324 if (!NT_SUCCESS(Status))
325 {
326 WARN("Could not commit %08lx bytes at %08lx for heap %08lx\n",
327 size - subheap->commitSize,
328 (DWORD)((char *)subheap + subheap->commitSize),
329 (DWORD)subheap->heap );
330 return FALSE;
331 }
332 }
333 subheap->commitSize += commitsize;
334 return TRUE;
335 }
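
/* Worked example of the commit granularity used above (COMMIT_MASK == 0xffff,
 * i.e. 64K); values and helper name are made up for illustration.
 */
#if 0
static void HEAP_ExampleCommitRounding( void )
{
    DWORD offset = 0x12345;   /* offset of 'ptr' within the sub-heap */
    DWORD rounded = (offset + COMMIT_MASK) & ~COMMIT_MASK;
    (void)rounded;            /* rounded == 0x20000: commits grow in whole 64K steps */
}
#endif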
336
337
338 /***********************************************************************
339 * HEAP_Decommit
340 *
341 * If possible, decommit the heap storage from (including) 'ptr'.
342 */
343 static inline BOOL HEAP_Decommit( SUBHEAP *subheap, void *ptr, DWORD flags )
344 {
345 DWORD size = (DWORD)((char *)ptr - (char *)subheap);
346 PVOID address;
347 ULONG decommitsize;
348 NTSTATUS Status;
349 /* round to next block and add one full block */
350 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
351 if (size >= subheap->commitSize)
352 return TRUE;
353
354 address = (PVOID)((char *)subheap + size);
355 decommitsize = subheap->commitSize - size;
356
357 if (!(flags & HEAP_NO_VALLOC))
358 {
359 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
360 &address,
361 &decommitsize,
362 MEM_DECOMMIT);
363 if (!NT_SUCCESS(Status))
365 {
366 WARN("Could not decommit %08lx bytes at %08lx for heap %08lx\n",
367 subheap->commitSize - size,
368 (DWORD)((char *)subheap + size),
369 (DWORD)subheap->heap );
370 return FALSE;
371 }
372 }
373 subheap->commitSize -= decommitsize;
374 return TRUE;
375 }
376
377
378 /***********************************************************************
379 * HEAP_CreateFreeBlock
380 *
381 * Create a free block at a specified address. 'size' is the size of the
382 * whole block, including the new arena.
383 */
384 static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, DWORD size )
385 {
386 ARENA_FREE *pFree;
387 BOOL last;
388
389 /* Create a free arena */
390
391 pFree = (ARENA_FREE *)ptr;
392 pFree->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
393 pFree->magic = ARENA_FREE_MAGIC;
394
395 /* If debugging, erase the freed block content */
396
397 if (TRACE_ON(heap))
398 {
399 char *pEnd = (char *)ptr + size;
400 if (pEnd > (char *)subheap + subheap->commitSize)
401 pEnd = (char *)subheap + subheap->commitSize;
402 if (pEnd > (char *)(pFree + 1))
403 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
404 }
405
406 /* Check if next block is free also */
407
408 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
409 (*(DWORD *)((char *)ptr + size) & ARENA_FLAG_FREE))
410 {
411 /* Remove the next arena from the free list */
412 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
413 pNext->next->prev = pNext->prev;
414 pNext->prev->next = pNext->next;
415 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
416 if (TRACE_ON(heap))
417 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
418 }
419
420 /* Set the next block PREV_FREE flag and pointer */
421
422 last = ((char *)ptr + size >= (char *)subheap + subheap->size);
423 if (!last)
424 {
425 DWORD *pNext = (DWORD *)((char *)ptr + size);
426 *pNext |= ARENA_FLAG_PREV_FREE;
427 *(ARENA_FREE **)(pNext - 1) = pFree;
428 }
429
430 /* Last, insert the new block into the free list */
431
432 pFree->size = size - sizeof(*pFree);
433 HEAP_InsertFreeBlock( subheap->heap, pFree, last );
434 }
435
436
437 /***********************************************************************
438 * HEAP_MakeInUseBlockFree
439 *
440 * Turn an in-use block into a free block. Can also decommit the end of
441 * the heap, and possibly even free the sub-heap altogether.
442 */
443 static void HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
444 DWORD flags)
445 {
446 ARENA_FREE *pFree;
447 DWORD size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
448
449 /* Check if we can merge with previous block */
450
451 if (pArena->size & ARENA_FLAG_PREV_FREE)
452 {
453 pFree = *((ARENA_FREE **)pArena - 1);
454 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
455 /* Remove it from the free list */
456 pFree->next->prev = pFree->prev;
457 pFree->prev->next = pFree->next;
458 }
459 else
460 pFree = (ARENA_FREE *)pArena;
461
462 /* Create a free block */
463
464 HEAP_CreateFreeBlock( subheap, pFree, size );
465 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
466 if ((char *)pFree + size < (char *)subheap + subheap->size)
467 return; /* Not the last block, so nothing more to do */
468
469 /* Free the whole sub-heap if it's empty and not the original one */
470
471 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
472 (subheap != &subheap->heap->subheap))
473 {
474 SUBHEAP *pPrev = &subheap->heap->subheap;
475 /* Remove the free block from the list */
476 pFree->next->prev = pFree->prev;
477 pFree->prev->next = pFree->next;
478 /* Remove the subheap from the list */
479 while (pPrev && (pPrev->next != subheap))
480 pPrev = pPrev->next;
481 if (pPrev)
482 pPrev->next = subheap->next;
483 /* Free the memory */
484 subheap->magic = 0;
485 if (!(flags & HEAP_NO_VALLOC))
486 {
487 ULONG dummySize = 0;
488 ZwFreeVirtualMemory(NtCurrentProcess(),
489 (PVOID*)&subheap,
490 &dummySize,
491 MEM_RELEASE);
492 }
493 return;
494 }
495
496 /* Decommit the end of the heap */
HEAP_Decommit( subheap, pFree + 1, flags );
497 }
498
499
500 /***********************************************************************
501 * HEAP_ShrinkBlock
502 *
503 * Shrink an in-use block.
504 */
505 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, DWORD size)
506 {
507 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
508 {
509 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
510 (pArena->size & ARENA_SIZE_MASK) - size );
511 /* assign size plus previous arena flags */
512 pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
513 }
514 else
515 {
516 /* Turn off PREV_FREE flag in next block */
517 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
518 if (pNext < (char *)subheap + subheap->size)
519 *(DWORD *)pNext &= ~ARENA_FLAG_PREV_FREE;
520 }
521 }
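
/* The split threshold above in numbers (illustrative; helper name invented).
 * On a 32-bit build sizeof(ARENA_FREE) is 16, so HEAP_MIN_BLOCK_SIZE is 24:
 * a block is only split when the leftover tail can still hold a free arena
 * plus the minimum payload.
 */
#if 0
static BOOL HEAP_ExampleWouldSplit( DWORD blockSize, DWORD wantedSize )
{
    /* mirrors the test in HEAP_ShrinkBlock: blockSize 0x40 with wantedSize
     * 0x20 splits (0x40 >= 0x20 + 24), with wantedSize 0x30 it does not */
    return (blockSize >= wantedSize + HEAP_MIN_BLOCK_SIZE);
}
#endif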
522
523 /***********************************************************************
524 * HEAP_InitSubHeap
525 */
526 static BOOL HEAP_InitSubHeap( HEAP *heap, LPVOID address, DWORD flags,
527 DWORD commitSize, DWORD totalSize )
528 {
529 SUBHEAP *subheap = (SUBHEAP *)address;
530 FREE_LIST_ENTRY *pEntry;
531 int i;
532 NTSTATUS Status;
533
534 /* Commit memory */
535 if (!(flags & HEAP_NO_VALLOC))
536 {
537 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
538 &address,
539 0,
540 (PULONG)&commitSize,
541 MEM_COMMIT,
542 PAGE_EXECUTE_READWRITE);
543 if (!NT_SUCCESS(Status))
544 {
545 WARN("Could not commit %08lx bytes for sub-heap %08lx\n",
546 commitSize, (DWORD)address );
547 return FALSE;
548 }
549 }
550
551 /* Fill the sub-heap structure */
552
553 subheap = (SUBHEAP *)address;
554 subheap->heap = heap;
555 subheap->size = totalSize;
556 subheap->commitSize = commitSize;
557 subheap->magic = SUBHEAP_MAGIC;
558
559 if ( subheap != (SUBHEAP *)heap )
560 {
561 /* If this is a secondary subheap, insert it into list */
562
563 subheap->headerSize = sizeof(SUBHEAP);
564 subheap->next = heap->subheap.next;
565 heap->subheap.next = subheap;
566 }
567 else
568 {
569 /* If this is a primary subheap, initialize main heap */
570
571 subheap->headerSize = sizeof(HEAP);
572 subheap->next = NULL;
573 heap->next = NULL;
574 heap->flags = flags;
575 heap->magic = HEAP_MAGIC;
576
577 /* Build the free lists */
578
579 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
580 {
581 pEntry->size = HEAP_freeListSizes[i];
582 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
583 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
584 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
585 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
586 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
587 pEntry->arena.threadId = 0;
588 pEntry->arena.magic = ARENA_FREE_MAGIC;
589 }
590
591 /* Initialize critical section */
592
593 RtlInitializeCriticalSection( &heap->critSection );
594 }
595
596 /* Create the first free block */
597
598 HEAP_CreateFreeBlock( subheap, (LPBYTE)subheap + subheap->headerSize,
599 subheap->size - subheap->headerSize );
600
601 return TRUE;
602 }
603
604 /***********************************************************************
605 * HEAP_CreateSubHeap
606 *
607 * Create a sub-heap of the given size.
608 * If heap == NULL, creates a main heap.
609 */
610 static SUBHEAP *HEAP_CreateSubHeap(PVOID BaseAddress,
611 HEAP *heap, DWORD flags,
612 DWORD commitSize, DWORD totalSize )
613 {
614 LPVOID address;
615 NTSTATUS Status;
616
617 /* Round-up sizes on a 64K boundary */
618
619 totalSize = (totalSize + 0xffff) & 0xffff0000;
620 commitSize = (commitSize + 0xffff) & 0xffff0000;
621 if (!commitSize)
622 commitSize = 0x10000;
623 if (totalSize < commitSize)
624 totalSize = commitSize;
625
626 /* Allocate the memory block */
627 address = BaseAddress;
628 if (!(flags & HEAP_NO_VALLOC))
629 {
630 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
631 &address,
632 0,
633 (PULONG)&totalSize,
634 MEM_RESERVE | MEM_COMMIT,
635 PAGE_EXECUTE_READWRITE);
636 if (!NT_SUCCESS(Status))
637 {
638 WARN("Could not VirtualAlloc %08lx bytes\n",
639 totalSize );
640 return NULL;
641 }
642 }
643
644 /* Initialize subheap */
645
646 if (!HEAP_InitSubHeap( heap? heap : (HEAP *)address,
647 address, flags, commitSize, totalSize ))
648 {
649 if (address && !(flags & HEAP_NO_VALLOC))
650 {
651 ULONG dummySize = 0;
652 ZwFreeVirtualMemory(NtCurrentProcess(),
653 &address,
654 &dummySize,
655 MEM_RELEASE);
656 }
657 return NULL;
658 }
659
660 return (SUBHEAP *)address;
661 }
662
663
664 /***********************************************************************
665 * HEAP_FindFreeBlock
666 *
667 * Find a free block at least as large as the requested size, and make sure
668 * the requested size is committed.
669 */
670 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, DWORD size,
671 SUBHEAP **ppSubHeap )
672 {
673 SUBHEAP *subheap;
674 ARENA_FREE *pArena;
675 FREE_LIST_ENTRY *pEntry = heap->freeList;
676
677 /* Find a suitable free list, and in it find a block large enough */
678
679 while (pEntry->size < size)
680 pEntry++;
681 pArena = pEntry->arena.next;
682 while (pArena != &heap->freeList[0].arena)
683 {
684 DWORD arena_size = (pArena->size & ARENA_SIZE_MASK) +
685 sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
686 if (arena_size >= size)
687 {
688 subheap = HEAP_FindSubHeap( heap, pArena );
689 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
690 + size + HEAP_MIN_BLOCK_SIZE,
691 heap->flags))
692 return NULL;
693 *ppSubHeap = subheap;
694 return pArena;
695 }
696
697 pArena = pArena->next;
698 }
699
700 /* If no block was found, attempt to grow the heap */
701
702 if (!(heap->flags & HEAP_GROWABLE))
703 {
704 WARN("Not enough space in heap %08lx for %08lx bytes\n",
705 (DWORD)heap, size );
706 return NULL;
707 }
708 /* Make sure we have a big enough size *committed* to fit another
709 * trailing free arena: just the sub-heap header, a first free arena
710 * that will become the in-use block, and HEAP_MIN_BLOCK_SIZE for the
711 * second free arena that may be handed all remaining free space in
712 * HEAP_ShrinkBlock() */
713 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE) + HEAP_MIN_BLOCK_SIZE;
714 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
715 max( HEAP_DEF_SIZE, size ) )))
716 return NULL;
717
718 TRACE("created new sub-heap %08lx of %08lx bytes for heap %08lx\n",
719 (DWORD)subheap, size, (DWORD)heap );
720
721 *ppSubHeap = subheap;
722 return (ARENA_FREE *)(subheap + 1);
723 }
724
725
726 /***********************************************************************
727 * HEAP_IsValidArenaPtr
728 *
729 * Check that the pointer is inside the range possible for arenas.
730 */
731 static BOOL HEAP_IsValidArenaPtr( HEAP *heap, void *ptr )
732 {
733 int i;
734 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
735 if (!subheap)
736 return FALSE;
737 if ((char *)ptr >= (char *)subheap + subheap->headerSize)
738 return TRUE;
739 if (subheap != &heap->subheap)
740 return FALSE;
741 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
742 if (ptr == (void *)&heap->freeList[i].arena)
743 return TRUE;
744 return FALSE;
745 }
746
747
748 /***********************************************************************
749 * HEAP_ValidateFreeArena
750 */
751 static BOOL HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
752 {
753 char *heapEnd = (char *)subheap + subheap->size;
754
755 /* Check magic number */
756 if (pArena->magic != ARENA_FREE_MAGIC)
757 {
758 ERR("Heap %08lx: invalid free arena magic for %08lx\n",
759 (DWORD)subheap->heap, (DWORD)pArena );
760 return FALSE;
761 }
762 /* Check size flags */
763 if (!(pArena->size & ARENA_FLAG_FREE) ||
764 (pArena->size & ARENA_FLAG_PREV_FREE))
765 {
766 ERR("Heap %08lx: bad flags %lx for free arena %08lx\n",
767 (DWORD)subheap->heap, pArena->size & ~ARENA_SIZE_MASK, (DWORD)pArena );
return FALSE;
768 }
769 /* Check arena size */
770 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
771 {
772 ERR("Heap %08lx: bad size %08lx for free arena %08lx\n",
773 (DWORD)subheap->heap, (DWORD)pArena->size & ARENA_SIZE_MASK, (DWORD)pArena );
774 return FALSE;
775 }
776 /* Check that next pointer is valid */
777 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
778 {
779 ERR("Heap %08lx: bad next ptr %08lx for arena %08lx\n",
780 (DWORD)subheap->heap, (DWORD)pArena->next, (DWORD)pArena );
781 return FALSE;
782 }
783 /* Check that next arena is free */
784 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
785 (pArena->next->magic != ARENA_FREE_MAGIC))
786 {
787 ERR("Heap %08lx: next arena %08lx invalid for %08lx\n",
788 (DWORD)subheap->heap, (DWORD)pArena->next, (DWORD)pArena );
789 return FALSE;
790 }
791 /* Check that prev pointer is valid */
792 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
793 {
794 ERR("Heap %08lx: bad prev ptr %08lx for arena %08lx\n",
795 (DWORD)subheap->heap, (DWORD)pArena->prev, (DWORD)pArena );
796 return FALSE;
797 }
798 /* Check that prev arena is free */
799 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
800 (pArena->prev->magic != ARENA_FREE_MAGIC))
801 {
802 ERR("Heap %08lx: prev arena %08lx invalid for %08lx\n",
803 (DWORD)subheap->heap, (DWORD)pArena->prev, (DWORD)pArena );
804 return FALSE;
805 }
806 /* Check that next block has PREV_FREE flag */
807 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
808 {
809 if (!(*(DWORD *)((char *)(pArena + 1) +
810 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
811 {
812 ERR("Heap %08lx: free arena %08lx next block has no PREV_FREE flag\n",
813 (DWORD)subheap->heap, (DWORD)pArena );
814 return FALSE;
815 }
816 /* Check next block back pointer */
817 if (*((ARENA_FREE **)((char *)(pArena + 1) +
818 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
819 {
820 ERR("Heap %08lx: arena %08lx has wrong back ptr %08lx\n",
821 (DWORD)subheap->heap, (DWORD)pArena,
822 *((DWORD *)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
823 return FALSE;
824 }
825 }
826 return TRUE;
827 }
828
829
830 /***********************************************************************
831 * HEAP_ValidateInUseArena
832 */
833 static BOOL HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOL quiet )
834 {
835 char *heapEnd = (char *)subheap + subheap->size;
836
837 /* Check magic number */
838 if (pArena->magic != ARENA_INUSE_MAGIC)
839 {
840 if (quiet == NOISY)
841 {
842 ERR("Heap %08lx: invalid in-use arena magic for %08lx\n",
843 (DWORD)subheap->heap, (DWORD)pArena );
844 if (TRACE_ON(heap))
845 HEAP_Dump( subheap->heap );
846 }
847 else if (WARN_ON(heap))
848 {
849 WARN("Heap %08lx: invalid in-use arena magic for %08lx\n",
850 (DWORD)subheap->heap, (DWORD)pArena );
851 if (TRACE_ON(heap))
852 HEAP_Dump( subheap->heap );
853 }
854 return FALSE;
855 }
856 /* Check size flags */
857 if (pArena->size & ARENA_FLAG_FREE)
858 {
859 ERR("Heap %08lx: bad flags %lx for in-use arena %08lx\n",
860 (DWORD)subheap->heap, pArena->size & ~ARENA_SIZE_MASK, (DWORD)pArena );
861 return FALSE;
862 }
863 /* Check arena size */
864 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
865 {
866 ERR("Heap %08lx: bad size %08lx for in-use arena %08lx\n",
867 (DWORD)subheap->heap, (DWORD)pArena->size & ARENA_SIZE_MASK, (DWORD)pArena );
868 return FALSE;
869 }
870 /* Check next arena PREV_FREE flag */
871 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
872 (*(DWORD *)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
873 {
874 ERR("Heap %08lx: in-use arena %08lx next block has PREV_FREE flag\n",
875 (DWORD)subheap->heap, (DWORD)pArena );
876 return FALSE;
877 }
878 /* Check prev free arena */
879 if (pArena->size & ARENA_FLAG_PREV_FREE)
880 {
881 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
882 /* Check prev pointer */
883 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
884 {
885 ERR("Heap %08lx: bad back ptr %08lx for arena %08lx\n",
886 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
887 return FALSE;
888 }
889 /* Check that prev arena is free */
890 if (!(pPrev->size & ARENA_FLAG_FREE) ||
891 (pPrev->magic != ARENA_FREE_MAGIC))
892 {
893 ERR("Heap %08lx: prev arena %08lx invalid for in-use %08lx\n",
894 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
895 return FALSE;
896 }
897 /* Check that prev arena is really the previous block */
898 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
899 {
900 ERR("Heap %08lx: prev arena %08lx is not prev for in-use %08lx\n",
901 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
902 return FALSE;
903 }
904 }
905 return TRUE;
906 }
907
908
909 /***********************************************************************
910 * HEAP_IsInsideHeap
911 * Checks whether the pointer points to a block inside a given heap.
912 *
913 * NOTES
914 * Should this return BOOL32?
915 *
916 * RETURNS
917 * !0: Success
918 * 0: Failure
919 */
920 int HEAP_IsInsideHeap(
921 HANDLE heap, /* [in] Heap */
922 DWORD flags, /* [in] Flags */
923 LPCVOID ptr /* [in] Pointer */
924 )
925 {
926 HEAP *heapPtr = HEAP_GetPtr( heap );
927 SUBHEAP *subheap;
928 int ret;
929
930 /* Validate the parameters */
931
932 if (!heapPtr)
933 return 0;
934 flags |= heapPtr->flags;
935 if (!(flags & HEAP_NO_SERIALIZE))
936 RtlEnterCriticalSection( &heapPtr->critSection );
937 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
938 (((char *)ptr >= (char *)subheap + subheap->headerSize
939 + sizeof(ARENA_INUSE))));
940 if (!(flags & HEAP_NO_SERIALIZE))
941 RtlLeaveCriticalSection( &heapPtr->critSection );
942 return ret;
943 }
944
945
946
947
948 /***********************************************************************
949 * HEAP_IsRealArena [Internal]
950 * Validates a block is a valid arena.
951 *
952 * RETURNS
953 * TRUE: Success
954 * FALSE: Failure
955 */
956 static BOOL HEAP_IsRealArena(
957 HANDLE heap, /* [in] Handle to the heap */
958 DWORD flags, /* [in] Bit flags that control access during operation */
959 LPCVOID block, /* [in] Optional pointer to memory block to validate */
960 BOOL quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
961 * does not complain */
962 )
963 {
964 SUBHEAP *subheap;
965 HEAP *heapPtr = (HEAP *)(heap);
966 BOOL ret = TRUE;
967
968 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
969 {
970 ERR("Invalid heap %08x!\n", heap );
971 return FALSE;
972 }
973
974 flags &= HEAP_NO_SERIALIZE;
975 flags |= heapPtr->flags;
976 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
977 if (!(flags & HEAP_NO_SERIALIZE))
978 RtlEnterCriticalSection( &heapPtr->critSection );
979
980 if (block)
981 {
982 /* Only check this single memory block */
983
984 /* The following code is really HEAP_IsInsideHeap *
985 * with serialization already done. */
986 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
987 ((char *)block < (char *)subheap + subheap->headerSize
988 + sizeof(ARENA_INUSE)))
989 {
990 if (quiet == NOISY)
991 {
992 ERR("Heap %08lx: block %08lx is not inside heap\n",
993 (DWORD)heap, (DWORD)block );
994 }
995 else if (WARN_ON(heap))
996 {
997 WARN("Heap %08lx: block %08lx is not inside heap\n",
998 (DWORD)heap, (DWORD)block );
999 }
1000 ret = FALSE;
1001 }
1002 else
1003 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
1004
1005 if (!(flags & HEAP_NO_SERIALIZE))
1006 RtlLeaveCriticalSection( &heapPtr->critSection );
1007 return ret;
1008 }
1009
1010 subheap = &heapPtr->subheap;
1011 while (subheap && ret)
1012 {
1013 char *ptr = (char *)subheap + subheap->headerSize;
1014 while (ptr < (char *)subheap + subheap->size)
1015 {
1016 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
1017 {
1018 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr ))
1019 {
1020 ret = FALSE;
1021 break;
1022 }
1023 ptr += sizeof(ARENA_FREE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
1024 }
1025 else
1026 {
1027 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY ))
1028 {
1029 ret = FALSE;
1030 break;
1031 }
1032 ptr += sizeof(ARENA_INUSE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
1033 }
1034 }
1035 subheap = subheap->next;
1036 }
1037
1038 if (!(flags & HEAP_NO_SERIALIZE))
1039 RtlLeaveCriticalSection( &heapPtr->critSection );
1040 return ret;
1041 }
1042
1043
1044 /***********************************************************************
1045 * HeapCreate (KERNEL32.336)
1046 * RETURNS
1047 * Handle of heap: Success
1048 * NULL: Failure
1049 *
1050 * @implemented
1051 */
1052 HANDLE STDCALL
1053 RtlCreateHeap(ULONG flags,
1054 PVOID BaseAddress,
1055 ULONG maxSize,
1056 ULONG initialSize,
1057 PVOID Unknown,
1058 PRTL_HEAP_DEFINITION Definition)
1059 {
1060 SUBHEAP *subheap;
1061 ULONG i;
1062
1063 /* Allocate the heap block */
1064
1065 if (!maxSize)
1066 {
1067 maxSize = HEAP_DEF_SIZE;
1068 flags |= HEAP_GROWABLE;
1069 }
1070 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize, maxSize )))
1071 {
1072 return 0;
1073 }
1074
1075 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1076 for (i = 0; i < NtCurrentPeb ()->NumberOfHeaps; i++)
1077 {
1078 if (NtCurrentPeb ()->ProcessHeaps[i] == NULL)
1079 {
1080 NtCurrentPeb()->ProcessHeaps[i] = (PVOID)subheap;
1081 break;
1082 }
1083 }
/* If no released slot was reused above, append the new heap and bump the
 * count, otherwise the heap would never show up in RtlGetProcessHeaps */
if (i == NtCurrentPeb ()->NumberOfHeaps &&
i < NtCurrentPeb ()->MaximumNumberOfHeaps)
{
NtCurrentPeb ()->ProcessHeaps[i] = (PVOID)subheap;
NtCurrentPeb ()->NumberOfHeaps++;
}
1084 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1085
1086 return (HANDLE)subheap;
1087 }
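
/* Caller-side sketch of the typical life cycle of a heap created by the
 * function above (illustrative only; sizes arbitrary, error handling trimmed,
 * the function name is invented).
 */
#if 0
static VOID RtlpHeapUsageExample(VOID)
{
    HANDLE heap;
    PVOID block;

    /* growable private heap, 64K committed up front, no fixed maximum */
    heap = RtlCreateHeap( HEAP_GROWABLE, NULL, 0, 0x10000, NULL, NULL );
    if (!heap)
        return;

    block = RtlAllocateHeap( heap, HEAP_ZERO_MEMORY, 256 );
    if (block)
        RtlFreeHeap( heap, 0, block );

    RtlDestroyHeap( heap );
}
#endif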
1088
1089 /***********************************************************************
1090 * HeapDestroy (KERNEL32.337)
1091 * RETURNS
1092 * TRUE: Success
1093 * FALSE: Failure
1094 *
1095 * @implemented
1096 */
1097 BOOL STDCALL
1098 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1099 {
1100 HEAP *heapPtr = HEAP_GetPtr( heap );
1101 SUBHEAP *subheap;
1102 ULONG i, flags;
1103
1104 TRACE("%08x\n", heap );
1105 if (!heapPtr)
1106 return FALSE;
1107
1108 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1109 for (i = 0; i < NtCurrentPeb ()->NumberOfHeaps; i++)
1110 {
1111 if (NtCurrentPeb ()->ProcessHeaps[i] == heap)
1112 {
1113 NtCurrentPeb()->ProcessHeaps[i] = NULL;
1114 break;
1115 }
1116 }
1117 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1118
1119 RtlDeleteCriticalSection( &heapPtr->critSection );
1120 subheap = &heapPtr->subheap;
1121 // We must save the flags. The first subheap is located right after
1122 // the heap structure, so releasing the first subheap also releases
1123 // the heap structure.
1124 flags = heapPtr->flags;
1125 while (subheap)
1126 {
1127 SUBHEAP *next = subheap->next;
1128
1129 if (!(flags & HEAP_NO_VALLOC))
1130 {
1131 ULONG dummySize = 0;
1132 ZwFreeVirtualMemory(NtCurrentProcess(),
1133 (PVOID*)&subheap,
1134 &dummySize,
1135 MEM_RELEASE);
1136 }
1137 subheap = next;
1138 }
1139 return TRUE;
1140 }
1141
1142
1143 /***********************************************************************
1144 * HeapAlloc (KERNEL32.334)
1145 * RETURNS
1146 * Pointer to allocated memory block
1147 * NULL: Failure
1148 * 0x7d030f60--invalid flags in RtlAllocateHeap
1149 * @implemented
1150 */
1151 PVOID STDCALL
1152 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1153 ULONG flags, /* [in] Heap allocation control flags */
1154 ULONG size) /* [in] Number of bytes to allocate */
1155 {
1156 ARENA_FREE *pArena;
1157 ARENA_INUSE *pInUse;
1158 SUBHEAP *subheap;
1159 HEAP *heapPtr = HEAP_GetPtr( heap );
1160
1161 /* Validate the parameters */
1162
1163 if (!heapPtr)
1164 {
1165 if (flags & HEAP_GENERATE_EXCEPTIONS)
1166 RtlRaiseStatus( STATUS_NO_MEMORY );
1167 return NULL;
1168 }
1169 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1170 flags |= heapPtr->flags;
1171 if (!(flags & HEAP_NO_SERIALIZE))
1172 RtlEnterCriticalSection( &heapPtr->critSection );
1173 size = (size + 7) & ~7;
1174 if (size < HEAP_MIN_BLOCK_SIZE)
1175 size = HEAP_MIN_BLOCK_SIZE;
1176
1177 /* Locate a suitable free block */
1178
1179 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1180 {
1181 TRACE("(%08x,%08lx,%08lx): returning NULL\n",
1182 heap, flags, size );
1183 if (!(flags & HEAP_NO_SERIALIZE))
1184 RtlLeaveCriticalSection( &heapPtr->critSection );
1185 if (flags & HEAP_GENERATE_EXCEPTIONS)
1186 RtlRaiseStatus( STATUS_NO_MEMORY );
1187 return NULL;
1188 }
1189
1190 /* Remove the arena from the free list */
1191
1192 pArena->next->prev = pArena->prev;
1193 pArena->prev->next = pArena->next;
1194
1195 /* Build the in-use arena */
1196
1197 pInUse = (ARENA_INUSE *)pArena;
1198 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1199 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1200 pInUse->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1201 pInUse->magic = ARENA_INUSE_MAGIC;
1202
1203 /* Shrink the block */
1204
1205 HEAP_ShrinkBlock( subheap, pInUse, size );
1206
1207 if (flags & HEAP_ZERO_MEMORY)
1208 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1209 else if (TRACE_ON(heap))
1210 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1211
1212 if (!(flags & HEAP_NO_SERIALIZE))
1213 RtlLeaveCriticalSection( &heapPtr->critSection );
1214
1215 TRACE("(%08x,%08lx,%08lx): returning %08lx\n",
1216 heap, flags, size, (DWORD)(pInUse + 1) );
1217 return (LPVOID)(pInUse + 1);
1218 }
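
/* The request size above is rounded up to an 8-byte multiple and floored at
 * HEAP_MIN_BLOCK_SIZE before a free block is searched for. Worked numbers
 * (illustrative; helper name invented):
 */
#if 0
static DWORD RtlpExampleRoundAllocSize( DWORD size )
{
    size = (size + 7) & ~7;            /* 1 -> 8, 20 -> 24, 256 -> 256 */
    if (size < HEAP_MIN_BLOCK_SIZE)
        size = HEAP_MIN_BLOCK_SIZE;    /* never below 24 on a 32-bit build */
    return size;
}
#endif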
1219
1220
1221 /***********************************************************************
1222 * HeapFree (KERNEL32.338)
1223 * RETURNS
1224 * TRUE: Success
1225 * FALSE: Failure
1226 *
1227 * @implemented
1228 */
1229 BOOLEAN STDCALL RtlFreeHeap(
1230 HANDLE heap, /* [in] Handle of heap */
1231 ULONG flags, /* [in] Heap freeing flags */
1232 PVOID ptr /* [in] Address of memory to free */
1233 )
1234 {
1235 ARENA_INUSE *pInUse;
1236 SUBHEAP *subheap;
1237 HEAP *heapPtr = HEAP_GetPtr( heap );
1238
1239 /* Validate the parameters */
1240
1241 if (!heapPtr)
1242 return FALSE;
1243 if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1244 {
1245 WARN("(%08x,%08lx,%08lx): asked to free NULL\n",
1246 heap, flags, (DWORD)ptr );
1247 return TRUE;
1248 }
1249
1250 flags &= HEAP_NO_SERIALIZE;
1251 flags |= heapPtr->flags;
1252 if (!(flags & HEAP_NO_SERIALIZE))
1253 RtlEnterCriticalSection( &heapPtr->critSection );
1254 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1255 {
1256 if (!(flags & HEAP_NO_SERIALIZE))
1257 RtlLeaveCriticalSection( &heapPtr->critSection );
1258 TRACE("(%08x,%08lx,%08lx): returning FALSE\n",
1259 heap, flags, (DWORD)ptr );
1260 return FALSE;
1261 }
1262
1263 /* Turn the block into a free block */
1264
1265 pInUse = (ARENA_INUSE *)ptr - 1;
1266 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1267 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1268
1269 if (!(flags & HEAP_NO_SERIALIZE))
1270 RtlLeaveCriticalSection( &heapPtr->critSection );
1271
1272 TRACE("(%08x,%08lx,%08lx): returning TRUE\n",
1273 heap, flags, (DWORD)ptr );
1274 return TRUE;
1275 }
1276
1277
1278 /***********************************************************************
1279 * HeapReAlloc (KERNEL32.340)
1280 * RETURNS
1281 * Pointer to reallocated memory block
1282 * NULL: Failure
1283 * 0x7d030f60--invalid flags in RtlAllocateHeap
1284 * @implemented
1285 */
1286 LPVOID STDCALL RtlReAllocateHeap(
1287 HANDLE heap, /* [in] Handle of heap block */
1288 DWORD flags, /* [in] Heap reallocation flags */
1289 LPVOID ptr, /* [in] Address of memory to reallocate */
1290 DWORD size /* [in] Number of bytes to reallocate */
1291 )
1292 {
1293 ARENA_INUSE *pArena;
1294 DWORD oldSize;
1295 HEAP *heapPtr;
1296 SUBHEAP *subheap;
1297
1298 if (!ptr)
1299 return NULL;
1300 if (!(heapPtr = HEAP_GetPtr( heap )))
1301 return NULL;
1302
1303 /* Validate the parameters */
1304
1305 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1306 HEAP_REALLOC_IN_PLACE_ONLY;
1307 flags |= heapPtr->flags;
1308 size = (size + 7) & ~7;
1309 if (size < HEAP_MIN_BLOCK_SIZE)
1310 size = HEAP_MIN_BLOCK_SIZE;
1311
1312 if (!(flags & HEAP_NO_SERIALIZE))
1313 RtlEnterCriticalSection( &heapPtr->critSection );
1314 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1315 {
1316 if (!(flags & HEAP_NO_SERIALIZE))
1317 RtlLeaveCriticalSection( &heapPtr->critSection );
1318 TRACE("(%08x,%08lx,%08lx,%08lx): returning NULL\n",
1319 heap, flags, (DWORD)ptr, size );
1320 if (flags & HEAP_GENERATE_EXCEPTIONS)
1321 RtlRaiseStatus( STATUS_NO_MEMORY );
1322 return NULL;
1323 }
1324
1325 /* Check if we need to grow the block */
1326
1327 pArena = (ARENA_INUSE *)ptr - 1;
1328 pArena->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1329
1330 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1331 oldSize = (pArena->size & ARENA_SIZE_MASK);
1332 if (size > oldSize)
1333 {
1334 char *pNext = (char *)(pArena + 1) + oldSize;
1335 if ((pNext < (char *)subheap + subheap->size) &&
1336 (*(DWORD *)pNext & ARENA_FLAG_FREE) &&
1337 (oldSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= size))
1338 {
1339 /* The next block is free and large enough */
1340 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1341 pFree->next->prev = pFree->prev;
1342 pFree->prev->next = pFree->next;
1343 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1344 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1345 + size + HEAP_MIN_BLOCK_SIZE,
1346 heapPtr->flags))
1347 {
1348 if (!(flags & HEAP_NO_SERIALIZE))
1349 RtlLeaveCriticalSection( &heapPtr->critSection );
1350 if (flags & HEAP_GENERATE_EXCEPTIONS)
1351 RtlRaiseStatus( STATUS_NO_MEMORY );
1352 return NULL;
1353 }
1354 HEAP_ShrinkBlock( subheap, pArena, size );
1355 }
1356 else /* Do it the hard way */
1357 {
1358 ARENA_FREE *pNew;
1359 ARENA_INUSE *pInUse;
1360 SUBHEAP *newsubheap;
1361
1362 if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1363 !(pNew = HEAP_FindFreeBlock( heapPtr, size, &newsubheap )))
1364 {
1365 if (!(flags & HEAP_NO_SERIALIZE))
1366 RtlLeaveCriticalSection( &heapPtr->critSection );
1367 if (flags & HEAP_GENERATE_EXCEPTIONS)
1368 RtlRaiseStatus( STATUS_NO_MEMORY );
1369 return NULL;
1370 }
1371
1372 /* Build the in-use arena */
1373
1374 pNew->next->prev = pNew->prev;
1375 pNew->prev->next = pNew->next;
1376 pInUse = (ARENA_INUSE *)pNew;
1377 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1378 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1379 pInUse->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1380 pInUse->magic = ARENA_INUSE_MAGIC;
1381 HEAP_ShrinkBlock( newsubheap, pInUse, size );
1382 memcpy( pInUse + 1, pArena + 1, oldSize );
1383
1384 /* Free the previous block */
1385
1386 HEAP_MakeInUseBlockFree( subheap, pArena, flags );
1387 subheap = newsubheap;
1388 pArena = pInUse;
1389 }
1390 }
1391 else
1392 HEAP_ShrinkBlock( subheap, pArena, size ); /* Shrink the block */
1393
1394 /* Clear the extra bytes if needed */
1395
1396 if (size > oldSize)
1397 {
1398 if (flags & HEAP_ZERO_MEMORY)
1399 memset( (char *)(pArena + 1) + oldSize, 0,
1400 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1401 else if (TRACE_ON(heap))
1402 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1403 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1404 }
1405
1406 /* Return the new arena */
1407
1408 if (!(flags & HEAP_NO_SERIALIZE))
1409 RtlLeaveCriticalSection( &heapPtr->critSection );
1410
1411 TRACE("(%08x,%08lx,%08lx,%08lx): returning %08lx\n",
1412 heap, flags, (DWORD)ptr, size, (DWORD)(pArena + 1) );
1413 return (LPVOID)(pArena + 1);
1414 }
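
/* Caller-side sketch of growing a block (illustrative; function name
 * invented). Note that, per the change described at the top of this page,
 * passing a NULL pointer does not allocate a new block; the call just
 * returns NULL.
 */
#if 0
static PVOID RtlpReAllocExample( HANDLE heap, PVOID block )
{
    /* ask for 1K, zeroing any newly added bytes; falls back to a
     * move-and-copy when the following block cannot absorb the growth */
    return RtlReAllocateHeap( heap, HEAP_ZERO_MEMORY, block, 1024 );
}
#endif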
1415
1416
1417 /***********************************************************************
1418 * HeapCompact (KERNEL32.335)
1419 *
1420 * @unimplemented
1421 */
1422 DWORD STDCALL RtlCompactHeap( HANDLE heap, DWORD flags )
1423 {
1424 SetLastError(ERROR_CALL_NOT_IMPLEMENTED);
1425 return 0;
1426 }
1427
1428
1429 /***********************************************************************
1430 * HeapLock (KERNEL32.339)
1431 * Attempts to acquire the critical section object for a specified heap.
1432 *
1433 * RETURNS
1434 * TRUE: Success
1435 * FALSE: Failure
1436 *
1437 * @implemented
1438 */
1439 BOOL STDCALL RtlLockHeap(
1440 HANDLE heap /* [in] Handle of heap to lock for exclusive access */
1441 )
1442 {
1443 HEAP *heapPtr = HEAP_GetPtr( heap );
1444 if (!heapPtr)
1445 return FALSE;
1446 RtlEnterCriticalSection( &heapPtr->critSection );
1447 return TRUE;
1448 }
1449
1450
1451 /***********************************************************************
1452 * HeapUnlock (KERNEL32.342)
1453 * Releases ownership of the critical section object.
1454 *
1455 * RETURNS
1456 * TRUE: Success
1457 * FALSE: Failure
1458 *
1459 * @implemented
1460 */
1461 BOOL STDCALL RtlUnlockHeap(
1462 HANDLE heap /* [in] Handle to the heap to unlock */
1463 )
1464 {
1465 HEAP *heapPtr = HEAP_GetPtr( heap );
1466 if (!heapPtr)
1467 return FALSE;
1468 RtlLeaveCriticalSection( &heapPtr->critSection );
1469 return TRUE;
1470 }
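
/* RtlLockHeap/RtlUnlockHeap simply take and release the heap's critical
 * section, which lets a caller make a group of heap calls appear atomic to
 * other threads (illustrative sketch; function name invented).
 */
#if 0
static VOID RtlpLockedHeapOps( HANDLE heap )
{
    if (!RtlLockHeap( heap ))
        return;
    /* several operations under one lock; the per-call serialization can
     * then be skipped with HEAP_NO_SERIALIZE */
    RtlFreeHeap( heap, HEAP_NO_SERIALIZE,
                 RtlAllocateHeap( heap, HEAP_NO_SERIALIZE, 64 ) );
    RtlUnlockHeap( heap );
}
#endif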
1471
1472
1473 /***********************************************************************
1474 * HeapSize (KERNEL32.341)
1475 * RETURNS
1476 * Size in bytes of allocated memory
1477 * 0xffffffff: Failure
1478 *
1479 * @implemented
1480 */
1481 DWORD STDCALL RtlSizeHeap(
1482 HANDLE heap, /* [in] Handle of heap */
1483 DWORD flags, /* [in] Heap size control flags */
1484 LPVOID ptr /* [in] Address of memory to return size for */
1485 )
1486 {
1487 DWORD ret;
1488 HEAP *heapPtr = HEAP_GetPtr( heap );
1489
1490 if (!heapPtr)
1491 return FALSE;
1492 flags &= HEAP_NO_SERIALIZE;
1493 flags |= heapPtr->flags;
1494 if (!(flags & HEAP_NO_SERIALIZE))
1495 RtlEnterCriticalSection( &heapPtr->critSection );
1496 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1497 {
1498 SetLastError( ERROR_INVALID_PARAMETER );
1499 ret = 0xffffffff;
1500 }
1501 else
1502 {
1503 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr - 1;
1504 ret = pArena->size & ARENA_SIZE_MASK;
1505 }
1506 if (!(flags & HEAP_NO_SERIALIZE))
1507 RtlLeaveCriticalSection( &heapPtr->critSection );
1508
1509 TRACE("(%08x,%08lx,%08lx): returning %08lx\n",
1510 heap, flags, (DWORD)ptr, ret );
1511 return ret;
1512 }
1513
1514
1515 /***********************************************************************
1516 * HeapValidate (KERNEL32.343)
1517 * Validates a specified heap.
1518 *
1519 * NOTES
1520 * Flags is ignored.
1521 *
1522 * RETURNS
1523 * TRUE: Success
1524 * FALSE: Failure
1525 *
1526 * @implemented
1527 */
1528 BOOL STDCALL RtlValidateHeap(
1529 HANDLE heap, /* [in] Handle to the heap */
1530 DWORD flags, /* [in] Bit flags that control access during operation */
1531 PVOID block /* [in] Optional pointer to memory block to validate */
1532 )
1533 {
1534
1535 HEAP *heapPtr = HEAP_GetPtr( heap );
1536 if (!heapPtr)
1537 return FALSE;
1538 return HEAP_IsRealArena( heapPtr, flags, block, QUIET );
1539 }
1540
1541
1542 /***********************************************************************
1543 * HeapWalk (KERNEL32.344)
1544 * Enumerates the memory blocks in a specified heap.
1545 * See HEAP_Dump() for info on heap structure.
1546 *
1547 * TODO
1548 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1549 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1550 *
1551 * RETURNS
1552 * TRUE: Success
1553 * FALSE: Failure
1554 */
1555 #if 0
1556 BOOL STDCALL HeapWalk(
1557 HANDLE heap, /* [in] Handle to heap to enumerate */
1558 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1559 )
1560 {
1561 HEAP *heapPtr = HEAP_GetPtr(heap);
1562 SUBHEAP *sub, *currentheap = NULL;
1563 BOOL ret = FALSE;
1564 char *ptr;
1565 int region_index = 0;
1566
1567 if (!heapPtr || !entry)
1568 {
1569 SetLastError(ERROR_INVALID_PARAMETER);
1570 return FALSE;
1571 }
1572
1573 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1574 RtlEnterCriticalSection( &heapPtr->critSection );
1575
1576 /* set ptr to the next arena to be examined */
1577
1578 if (!entry->lpData) /* first call (init) ? */
1579 {
1580 TRACE("begin walking of heap 0x%08x.\n", heap);
1581 /*HEAP_Dump(heapPtr);*/
1582 currentheap = &heapPtr->subheap;
1583 ptr = (char*)currentheap + currentheap->headerSize;
1584 }
1585 else
1586 {
1587 ptr = entry->lpData;
1588 sub = &heapPtr->subheap;
1589 while (sub)
1590 {
1591 if (((char *)ptr >= (char *)sub) &&
1592 ((char *)ptr < (char *)sub + sub->size))
1593 {
1594 currentheap = sub;
1595 break;
1596 }
1597 sub = sub->next;
1598 region_index++;
1599 }
1600 if (currentheap == NULL)
1601 {
1602 ERR("no matching subheap found, shouldn't happen !\n");
1603 SetLastError(ERROR_NO_MORE_ITEMS);
1604 goto HW_end;
1605 }
1606
1607 ptr += entry->cbData; /* point to next arena */
1608 if (ptr > (char *)currentheap + currentheap->size - 1)
1609 { /* proceed with next subheap */
1610 if (!(currentheap = currentheap->next))
1611 { /* successfully finished */
1612 TRACE("end reached.\n");
1613 SetLastError(ERROR_NO_MORE_ITEMS);
1614 goto HW_end;
1615 }
1616 ptr = (char*)currentheap + currentheap->headerSize;
1617 }
1618 }
1619
1620 entry->wFlags = 0;
1621 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
1622 {
1623 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1624
1625 /*TRACE("free, magic: %04x\n", pArena->magic);*/
1626
1627 entry->lpData = pArena + 1;
1628 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1629 entry->cbOverhead = sizeof(ARENA_FREE);
1630 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1631 }
1632 else
1633 {
1634 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1635
1636 /*TRACE("busy, magic: %04x\n", pArena->magic);*/
1637
1638 entry->lpData = pArena + 1;
1639 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1640 entry->cbOverhead = sizeof(ARENA_INUSE);
1641 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1642 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1643 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1644 }
1645
1646 entry->iRegionIndex = region_index;
1647
1648 /* first element of heap ? */
1649 if (ptr == (char *)currentheap + currentheap->headerSize)
1650 {
1651 entry->wFlags |= PROCESS_HEAP_REGION;
1652 entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
1653 entry->Foo.Region.dwUnCommittedSize =
1654 currentheap->size - currentheap->commitSize;
1655 entry->Foo.Region.lpFirstBlock = /* first valid block */
1656 (char *)currentheap + currentheap->headerSize;
1657 entry->Foo.Region.lpLastBlock = /* first invalid block */
1658 (char *)currentheap + currentheap->size;
1659 }
1660 ret = TRUE;
1661
1662 HW_end:
1663 if (!(heapPtr->flags & HEAP_NO_SERIALIZE))
1664 RtlLeaveCriticalSection( &heapPtr->critSection );
1665
1666 return ret;
1667 }
1668 #endif
1669
1670 VOID
1671 RtlInitializeHeapManager(VOID)
1672 {
1673 PPEB Peb;
1674
1675 Peb = NtCurrentPeb();
1676
1677 Peb->NumberOfHeaps = 0;
1678 Peb->MaximumNumberOfHeaps = (PAGE_SIZE - sizeof(PEB)) / sizeof(HANDLE);
1679 Peb->ProcessHeaps = (PVOID)(Peb + 1);
1680
1681 RtlInitializeCriticalSection(&RtlpProcessHeapsListLock);
1682 }
1683
1684
1685 /*
1686 * @implemented
1687 */
1688 NTSTATUS STDCALL
1689 RtlEnumProcessHeaps(DWORD STDCALL_FUNC(*func)(void*,LONG),
1690 LONG lParam)
1691 {
1692 NTSTATUS Status = STATUS_SUCCESS;
1693 ULONG i;
1694
1695 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1696
1697 for (i = 0; i < NtCurrentPeb()->NumberOfHeaps; i++)
1698 {
1699 Status = func(NtCurrentPeb()->ProcessHeaps[i],lParam);
1700 if (!NT_SUCCESS(Status))
1701 break;
1702 }
1703
1704 RtlLeaveCriticalSection(&RtlpProcessHeapsListLock);
1705
1706 return Status;
1707 }
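
/* Minimal enumeration callback matching the prototype above (illustrative;
 * names invented). Returning a non-success status stops the walk; the lParam
 * cast assumes the 32-bit pointer size this code targets.
 */
#if 0
static DWORD STDCALL RtlpExampleHeapCounter( void *heap, LONG lParam )
{
    ULONG *count = (ULONG *)lParam;   /* caller passes a counter through lParam */
    (void)heap;
    (*count)++;
    return STATUS_SUCCESS;
}
/* usage: ULONG n = 0; RtlEnumProcessHeaps( RtlpExampleHeapCounter, (LONG)&n ); */
#endif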
1708
1709
1710 /*
1711 * @implemented
1712 */
1713 ULONG STDCALL
1714 RtlGetProcessHeaps(ULONG HeapCount,
1715 HANDLE *HeapArray)
1716 {
1717 ULONG Result = 0;
1718
1719 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1720
1721 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1722 {
1723 Result = NtCurrentPeb()->NumberOfHeaps;
1724 memmove(HeapArray,
1725 NtCurrentPeb()->ProcessHeaps,
1726 Result * sizeof(HANDLE));
1727 }
1728
1729 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1730
1731 return Result;
1732 }
1733
1734
1735 /*
1736 * @implemented
1737 */
1738 BOOLEAN STDCALL
1739 RtlValidateProcessHeaps(VOID)
1740 {
1741 HANDLE Heaps[128];
1742 BOOLEAN Result = TRUE;
1743 ULONG HeapCount;
1744 ULONG i;
1745
1746 HeapCount = RtlGetProcessHeaps(128, Heaps);
1747 for (i = 0; i < HeapCount; i++)
1748 {
1749 if (!RtlValidateHeap(Heaps[i], 0, NULL))
1750 Result = FALSE;
1751 }
1752
1753 return Result;
1754 }
1755
1756 /*
1757 * @unimplemented
1758 */
1759 ULONG
1760 STDCALL
1761 RtlZeroHeap(
1762 IN PVOID HeapHandle,
1763 IN ULONG Flags
1764 )
1765 {
1766 UNIMPLEMENTED;
1767 return 0;
1768 }
1769
1770 /* EOF */