/*
 * reactos/lib/ntdll/rtl/heap.c
 *
 * Win32 heap functions
 *
 * Copyright 1996 Alexandre Julliard
 * Copyright 1998 Ulrich Weigand
 */
7
8
9 /* Note: the heap data structures are based on what Pietrek describes in his
10 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
11 * the same, but could be easily adapted if it turns out some programs
12 * require it.
13 */
14
15 #include <string.h>
16 #include <ddk/ntddk.h>
17 #include <ntdll/rtl.h>
18 #include <ntos/heap.h>
19 #include <ntos/minmax.h>
20
21 #define NDEBUG
22 #include <ntdll/ntdll.h>
23
24 #define DPRINTF DPRINT
25 #define ERR DPRINT
26 #define SetLastError(x)
27 #define WARN DPRINT
28 #define TRACE DPRINT
29 #define WARN_ON(x) (1)
30
31 #undef assert
32 #ifdef NDEBUG
33 #define TRACE_ON(x) (0)
34 #define assert(x)
35 #else
36 #define TRACE_ON(x) (1)
#define assert(x) do { if (!(x)) DbgPrint("Assertion failed at %s:%d\n", __FILE__, __LINE__); } while (0)
38 #endif
39
40
41 static CRITICAL_SECTION RtlpProcessHeapsListLock;
42
43
44 typedef struct tagARENA_INUSE
45 {
46 DWORD size; /* Block size; must be the first field */
47 WORD threadId; /* Allocating thread id */
48 WORD magic; /* Magic number */
49 void *callerEIP; /* EIP of caller upon allocation */
50 } ARENA_INUSE;
51
52 typedef struct tagARENA_FREE
53 {
54 DWORD size; /* Block size; must be the first field */
55 WORD threadId; /* Freeing thread id */
56 WORD magic; /* Magic number */
57 struct tagARENA_FREE *next; /* Next free arena */
58 struct tagARENA_FREE *prev; /* Prev free arena */
59 } ARENA_FREE;
60
61 #define ARENA_FLAG_FREE 0x00000001 /* flags OR'ed with arena size */
62 #define ARENA_FLAG_PREV_FREE 0x00000002
63 #define ARENA_SIZE_MASK 0xfffffffc
64 #define ARENA_INUSE_MAGIC 0x4842 /* Value for arena 'magic' field */
65 #define ARENA_FREE_MAGIC 0x4846 /* Value for arena 'magic' field */
66
67 #define ARENA_INUSE_FILLER 0x55
68 #define ARENA_FREE_FILLER 0xaa
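/* Rough sketch of how blocks are laid out inside a sub-heap (derived from
 * the structures above and the code below): every block is an arena header
 * immediately followed by its data, and the two low bits of 'size' carry
 * the ARENA_FLAG_* bits (hence ARENA_SIZE_MASK):
 *
 *   [ARENA_INUSE][user data ...][ARENA_FREE][free space ...][ARENA_INUSE]...
 *
 * When a block directly follows a free block, its size word has
 * ARENA_FLAG_PREV_FREE set and the DWORD just before its arena header holds
 * a back pointer to that free arena (see HEAP_CreateFreeBlock and
 * HEAP_MakeInUseBlockFree).
 */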
69
70 #define QUIET 1 /* Suppress messages */
71 #define NOISY 0 /* Report all errors */
72
73 #define HEAP_NB_FREE_LISTS 4 /* Number of free lists */
74
75 /* Max size of the blocks on the free lists */
76 static const DWORD HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
77 {
78 0x20, 0x80, 0x200, 0xffffffff
79 };
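/* Worked example of the size-class selection used by HEAP_InsertFreeBlock and
 * HEAP_FindFreeBlock below: both walk this table until they reach the first
 * entry whose limit is >= the block size. A 0x30-byte block therefore skips
 * the 0x20 list and goes on the 0x80 list, and anything larger than 0x200
 * ends up on the last, unbounded list.
 */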
80
81 typedef struct
82 {
83 DWORD size;
84 ARENA_FREE arena;
85 } FREE_LIST_ENTRY;
86
87 struct tagHEAP;
88
89 typedef struct tagSUBHEAP
90 {
91 DWORD size; /* Size of the whole sub-heap */
92 DWORD commitSize; /* Committed size of the sub-heap */
93 DWORD headerSize; /* Size of the heap header */
94 struct tagSUBHEAP *next; /* Next sub-heap */
95 struct tagHEAP *heap; /* Main heap structure */
96 DWORD magic; /* Magic number */
97 WORD selector; /* Selector for HEAP_WINE_SEGPTR heaps */
98 } SUBHEAP, *PSUBHEAP;
99
100 #define SUBHEAP_MAGIC ((DWORD)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))
101
102 typedef struct tagHEAP
103 {
104 SUBHEAP subheap; /* First sub-heap */
105 struct tagHEAP *next; /* Next heap for this process */
106 FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
107 CRITICAL_SECTION critSection; /* Critical section for serialization */
108 DWORD flags; /* Heap flags */
109 DWORD magic; /* Magic number */
110 void *private; /* Private pointer for the user of the heap */
111 } HEAP, *PHEAP;
112
113 #define HEAP_MAGIC ((DWORD)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))
114
115 #define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
116 #define HEAP_MIN_BLOCK_SIZE (8+sizeof(ARENA_FREE)) /* Min. heap block size */
117 #define COMMIT_MASK 0xffff /* bitmask for commit/decommit granularity */
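/* Example of the commit granularity implied by COMMIT_MASK: HEAP_Commit
 * rounds the required size up with (size + COMMIT_MASK) & ~COMMIT_MASK,
 * i.e. to the next 64Kb boundary, so needing 0x12345 bytes raises the
 * committed size to 0x20000 (capped at the sub-heap size).
 */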
118
119
120 static BOOL HEAP_IsRealArena( HANDLE heap, DWORD flags, LPCVOID block, BOOL quiet );
121
122 #ifdef __GNUC__
123 #define GET_EIP() (__builtin_return_address(0))
124 #define SET_EIP(ptr) ((ARENA_INUSE*)(ptr) - 1)->callerEIP = GET_EIP()
125 #else
126 #define GET_EIP() 0
127 #define SET_EIP(ptr) /* nothing */
128 #endif /* __GNUC__ */
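/* callerEIP is purely diagnostic: on GCC builds GET_EIP() records the return
 * address of whoever called the allocation routine (HEAP_Dump prints it as
 * "EIP="); on other compilers it is simply 0.
 */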
129
130
131 /***********************************************************************
132 * HEAP_Dump
133 */
134 void
135 HEAP_Dump(PHEAP heap)
136 {
137 int i;
138 SUBHEAP *subheap;
139 char *ptr;
140
141 DPRINTF( "Heap: %08lx\n", (DWORD)heap );
142 DPRINTF( "Next: %08lx Sub-heaps: %08lx",
143 (DWORD)heap->next, (DWORD)&heap->subheap );
144 subheap = &heap->subheap;
145 while (subheap->next)
146 {
147 DPRINTF( " -> %08lx", (DWORD)subheap->next );
148 subheap = subheap->next;
149 }
150
151 DPRINTF( "\nFree lists:\n Block Stat Size Id\n" );
152 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
153 DPRINTF( "%08lx free %08lx %04x prev=%08lx next=%08lx\n",
154 (DWORD)&heap->freeList[i].arena, heap->freeList[i].arena.size,
155 heap->freeList[i].arena.threadId,
156 (DWORD)heap->freeList[i].arena.prev,
157 (DWORD)heap->freeList[i].arena.next );
158
159 subheap = &heap->subheap;
160 while (subheap)
161 {
162 DWORD freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
163 DPRINTF( "\n\nSub-heap %08lx: size=%08lx committed=%08lx\n",
164 (DWORD)subheap, subheap->size, subheap->commitSize );
165
166 DPRINTF( "\n Block Stat Size Id\n" );
167 ptr = (char*)subheap + subheap->headerSize;
168 while (ptr < (char *)subheap + subheap->size)
169 {
170 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
171 {
172 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
173 DPRINTF( "%08lx free %08lx %04x prev=%08lx next=%08lx\n",
174 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
175 pArena->threadId, (DWORD)pArena->prev,
176 (DWORD)pArena->next);
177 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
178 arenaSize += sizeof(ARENA_FREE);
179 freeSize += pArena->size & ARENA_SIZE_MASK;
180 }
181 else if (*(DWORD *)ptr & ARENA_FLAG_PREV_FREE)
182 {
183 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
184 DPRINTF( "%08lx Used %08lx %04x back=%08lx EIP=%p\n",
185 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
186 pArena->threadId, *((DWORD *)pArena - 1),
187 pArena->callerEIP );
188 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
189 arenaSize += sizeof(ARENA_INUSE);
190 usedSize += pArena->size & ARENA_SIZE_MASK;
191 }
192 else
193 {
194 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
195 DPRINTF( "%08lx used %08lx %04x EIP=%p\n",
196 (DWORD)pArena, pArena->size & ARENA_SIZE_MASK,
197 pArena->threadId, pArena->callerEIP );
198 ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
199 arenaSize += sizeof(ARENA_INUSE);
200 usedSize += pArena->size & ARENA_SIZE_MASK;
201 }
202 }
203 DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
204 subheap->size, subheap->commitSize, freeSize, usedSize,
205 arenaSize, (arenaSize * 100) / subheap->size );
206 subheap = subheap->next;
207 }
208 }
209
210
211 /***********************************************************************
212 * HEAP_GetPtr
213 * RETURNS
214 * Pointer to the heap
215 * NULL: Failure
216 */
217 static PHEAP
218 HEAP_GetPtr(HANDLE heap) /* [in] Handle to the heap */
219 {
220 HEAP *heapPtr = (HEAP *)heap;
221 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
222 {
223 ERR("Invalid heap %08x!\n", heap );
224 return NULL;
225 }
226 if (TRACE_ON(heap) && !HEAP_IsRealArena( heap, 0, NULL, NOISY ))
227 {
228 HEAP_Dump( heapPtr );
229 assert( FALSE );
230 return NULL;
231 }
232 return heapPtr;
233 }
234
235
236 /***********************************************************************
237 * HEAP_InsertFreeBlock
238 *
239 * Insert a free block into the free list.
240 */
241 static VOID
242 HEAP_InsertFreeBlock(PHEAP heap,
243 ARENA_FREE *pArena)
244 {
245 FREE_LIST_ENTRY *pEntry = heap->freeList;
246 while (pEntry->size < pArena->size) pEntry++;
247 pArena->size |= ARENA_FLAG_FREE;
248 pArena->next = pEntry->arena.next;
249 pArena->next->prev = pArena;
250 pArena->prev = &pEntry->arena;
251 pEntry->arena.next = pArena;
252 }
253
254
255 /***********************************************************************
256 * HEAP_FindSubHeap
257 * Find the sub-heap containing a given address.
258 *
259 * RETURNS
260 * Pointer: Success
261 * NULL: Failure
262 */
263 static PSUBHEAP
264 HEAP_FindSubHeap(HEAP *heap, /* [in] Heap pointer */
265 LPCVOID ptr) /* [in] Address */
266 {
267 SUBHEAP *sub = &heap->subheap;
268 while (sub)
269 {
270 if (((char *)ptr >= (char *)sub) &&
271 ((char *)ptr < (char *)sub + sub->size)) return sub;
272 sub = sub->next;
273 }
274 return NULL;
275 }
276
277
278 /***********************************************************************
279 * HEAP_Commit
280 *
281 * Make sure the heap storage is committed up to (not including) ptr.
282 */
283 static inline BOOL
284 HEAP_Commit(SUBHEAP *subheap,
285 void *ptr,
286 DWORD flags)
287 {
288 DWORD size = (DWORD)((char *)ptr - (char *)subheap);
289 NTSTATUS Status;
290 PVOID address;
291 ULONG commitsize;
292
293 size = (size + COMMIT_MASK) & ~COMMIT_MASK;
294 if (size > subheap->size) size = subheap->size;
295 if (size <= subheap->commitSize) return TRUE;
296
297 address = (PVOID)((char *)subheap + subheap->commitSize);
298 commitsize = size - subheap->commitSize;
299
300 if (!(flags & HEAP_NO_VALLOC))
301 {
302 Status = NtAllocateVirtualMemory(NtCurrentProcess(),
303 &address,
304 0,
305 &commitsize,
306 MEM_COMMIT,
307 PAGE_EXECUTE_READWRITE);
308 if (!NT_SUCCESS(Status))
309 {
310 WARN("Could not commit %08lx bytes at %08lx for heap %08lx\n",
311 size - subheap->commitSize,
312 (DWORD)((char *)subheap + subheap->commitSize),
313 (DWORD)subheap->heap );
314 return FALSE;
315 }
316 }
317 subheap->commitSize = size;
318 return TRUE;
319 }
320
321
322 /***********************************************************************
323 * HEAP_Decommit
324 *
325 * If possible, decommit the heap storage from (including) 'ptr'.
326 */
327 static inline BOOL HEAP_Decommit( SUBHEAP *subheap, void *ptr, DWORD flags )
328 {
329 DWORD size = (DWORD)((char *)ptr - (char *)subheap);
330 PVOID address;
331 ULONG decommitsize;
332 NTSTATUS Status;
333 /* round to next block and add one full block */
334 size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
335 if (size >= subheap->commitSize) return TRUE;
336
337 address = (PVOID)((char *)subheap + size);
338 decommitsize = subheap->commitSize - size;
339
340 if (!(flags & HEAP_NO_VALLOC))
341 {
342 Status = ZwFreeVirtualMemory(NtCurrentProcess(),
343 &address,
344 &decommitsize,
345 MEM_DECOMMIT);
if (!NT_SUCCESS(Status))
347 {
348 WARN("Could not decommit %08lx bytes at %08lx for heap %08lx\n",
349 subheap->commitSize - size,
350 (DWORD)((char *)subheap + size),
351 (DWORD)subheap->heap );
352 return FALSE;
353 }
354 }
355 subheap->commitSize = size;
356 return TRUE;
357 }
358
359
360 /***********************************************************************
361 * HEAP_CreateFreeBlock
362 *
363 * Create a free block at a specified address. 'size' is the size of the
364 * whole block, including the new arena.
365 */
366 static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, DWORD size )
367 {
368 ARENA_FREE *pFree;
369
370 /* Create a free arena */
371
372 pFree = (ARENA_FREE *)ptr;
373 pFree->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
374 pFree->magic = ARENA_FREE_MAGIC;
375
376 /* If debugging, erase the freed block content */
377
378 if (TRACE_ON(heap))
379 {
380 char *pEnd = (char *)ptr + size;
381 if (pEnd > (char *)subheap + subheap->commitSize)
382 pEnd = (char *)subheap + subheap->commitSize;
383 if (pEnd > (char *)(pFree + 1))
384 memset( pFree + 1, ARENA_FREE_FILLER, pEnd - (char *)(pFree + 1) );
385 }
386
387 /* Check if next block is free also */
388
389 if (((char *)ptr + size < (char *)subheap + subheap->size) &&
390 (*(DWORD *)((char *)ptr + size) & ARENA_FLAG_FREE))
391 {
392 /* Remove the next arena from the free list */
393 ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
394 pNext->next->prev = pNext->prev;
395 pNext->prev->next = pNext->next;
396 size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
397 if (TRACE_ON(heap))
398 memset( pNext, ARENA_FREE_FILLER, sizeof(ARENA_FREE) );
399 }
400
401 /* Set the next block PREV_FREE flag and pointer */
402
403 if ((char *)ptr + size < (char *)subheap + subheap->size)
404 {
405 DWORD *pNext = (DWORD *)((char *)ptr + size);
406 *pNext |= ARENA_FLAG_PREV_FREE;
407 *(ARENA_FREE **)(pNext - 1) = pFree;
408 }
409
410 /* Last, insert the new block into the free list */
411
412 pFree->size = size - sizeof(*pFree);
413 HEAP_InsertFreeBlock( subheap->heap, pFree );
414 }
415
416
417 /***********************************************************************
418 * HEAP_MakeInUseBlockFree
419 *
420 * Turn an in-use block into a free block. Can also decommit the end of
421 * the heap, and possibly even free the sub-heap altogether.
422 */
423 static void HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena,
424 DWORD flags)
425 {
426 ARENA_FREE *pFree;
427 DWORD size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
428
429 /* Check if we can merge with previous block */
430
431 if (pArena->size & ARENA_FLAG_PREV_FREE)
432 {
433 pFree = *((ARENA_FREE **)pArena - 1);
434 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
435 /* Remove it from the free list */
436 pFree->next->prev = pFree->prev;
437 pFree->prev->next = pFree->next;
438 }
439 else pFree = (ARENA_FREE *)pArena;
440
441 /* Create a free block */
442
443 HEAP_CreateFreeBlock( subheap, pFree, size );
444 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
445 if ((char *)pFree + size < (char *)subheap + subheap->size)
446 return; /* Not the last block, so nothing more to do */
447
448 /* Free the whole sub-heap if it's empty and not the original one */
449
450 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
451 (subheap != &subheap->heap->subheap))
452 {
453 SUBHEAP *pPrev = &subheap->heap->subheap;
454 /* Remove the free block from the list */
455 pFree->next->prev = pFree->prev;
456 pFree->prev->next = pFree->next;
457 /* Remove the subheap from the list */
458 while (pPrev && (pPrev->next != subheap)) pPrev = pPrev->next;
459 if (pPrev) pPrev->next = subheap->next;
460 /* Free the memory */
461 subheap->magic = 0;
462 if (!(flags & HEAP_NO_VALLOC))
463 {
464 ULONG dummySize = 0;
465 ZwFreeVirtualMemory(NtCurrentProcess(),
466 (PVOID*)&subheap,
467 &dummySize,
468 MEM_RELEASE);
469 }
470 return;
471 }
472
/* Decommit the end of the heap */

HEAP_Decommit( subheap, pFree + 1, flags );
}
475
476
477 /***********************************************************************
478 * HEAP_ShrinkBlock
479 *
480 * Shrink an in-use block.
481 */
482 static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, DWORD size)
483 {
484 if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_BLOCK_SIZE)
485 {
486 HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
487 (pArena->size & ARENA_SIZE_MASK) - size );
488 pArena->size = (pArena->size & ~ARENA_SIZE_MASK) | size;
489 }
490 else
491 {
492 /* Turn off PREV_FREE flag in next block */
493 char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
494 if (pNext < (char *)subheap + subheap->size)
495 *(DWORD *)pNext &= ~ARENA_FLAG_PREV_FREE;
496 }
497 }
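/* Example of the split decision above, assuming a 32-bit build where
 * HEAP_MIN_BLOCK_SIZE works out to 0x18 bytes: shrinking a 0x100-byte arena
 * to 0x40 leaves 0xc0 bytes of slack, enough to carve a new free block out
 * of the tail; if the slack were smaller than that minimum it would simply
 * remain part of the block, and the next block's PREV_FREE flag is cleared
 * instead, since it is no longer preceded by a free block.
 */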
498
499 /***********************************************************************
500 * HEAP_InitSubHeap
501 */
502 static BOOL HEAP_InitSubHeap( HEAP *heap, LPVOID address, DWORD flags,
503 DWORD commitSize, DWORD totalSize )
504 {
505 SUBHEAP *subheap = (SUBHEAP *)address;
506 WORD selector = 0;
507 FREE_LIST_ENTRY *pEntry;
508 int i;
509 NTSTATUS Status;
510
511 /* Commit memory */
512 if (!(flags & HEAP_NO_VALLOC))
513 {
514 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
515 &address,
516 0,
517 (PULONG)&commitSize,
518 MEM_COMMIT,
519 PAGE_EXECUTE_READWRITE);
520 if (!NT_SUCCESS(Status))
521 {
522 WARN("Could not commit %08lx bytes for sub-heap %08lx\n",
523 commitSize, (DWORD)address );
524 return FALSE;
525 }
526 }
527
528 /* Fill the sub-heap structure */
529
530 subheap->heap = heap;
531 subheap->selector = selector;
532 subheap->size = totalSize;
533 subheap->commitSize = commitSize;
534 subheap->magic = SUBHEAP_MAGIC;
535
536 if ( subheap != (SUBHEAP *)heap )
537 {
538 /* If this is a secondary subheap, insert it into list */
539
540 subheap->headerSize = sizeof(SUBHEAP);
541 subheap->next = heap->subheap.next;
542 heap->subheap.next = subheap;
543 }
544 else
545 {
546 /* If this is a primary subheap, initialize main heap */
547
548 subheap->headerSize = sizeof(HEAP);
549 subheap->next = NULL;
550 heap->next = NULL;
551 heap->flags = flags;
552 heap->magic = HEAP_MAGIC;
553
554 /* Build the free lists */
555
556 for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
557 {
558 pEntry->size = HEAP_freeListSizes[i];
559 pEntry->arena.size = 0 | ARENA_FLAG_FREE;
560 pEntry->arena.next = i < HEAP_NB_FREE_LISTS-1 ?
561 &heap->freeList[i+1].arena : &heap->freeList[0].arena;
562 pEntry->arena.prev = i ? &heap->freeList[i-1].arena :
563 &heap->freeList[HEAP_NB_FREE_LISTS-1].arena;
564 pEntry->arena.threadId = 0;
565 pEntry->arena.magic = ARENA_FREE_MAGIC;
566 }
567
568 /* Initialize critical section */
569
570 RtlInitializeCriticalSection( &heap->critSection );
571 }
572
573 /* Create the first free block */
574
575 HEAP_CreateFreeBlock( subheap, (LPBYTE)subheap + subheap->headerSize,
576 subheap->size - subheap->headerSize );
577
578 return TRUE;
579 }
580
581 /***********************************************************************
582 * HEAP_CreateSubHeap
583 *
584 * Create a sub-heap of the given size.
585 * If heap == NULL, creates a main heap.
586 */
587 static SUBHEAP *HEAP_CreateSubHeap(PVOID BaseAddress,
588 HEAP *heap, DWORD flags,
589 DWORD commitSize, DWORD totalSize )
590 {
591 LPVOID address;
592 NTSTATUS Status;
593
594 /* Round-up sizes on a 64K boundary */
595
596 totalSize = (totalSize + 0xffff) & 0xffff0000;
597 commitSize = (commitSize + 0xffff) & 0xffff0000;
598 if (!commitSize) commitSize = 0x10000;
599 if (totalSize < commitSize) totalSize = commitSize;
600
601 /* Allocate the memory block */
602 address = BaseAddress;
603 if (!(flags & HEAP_NO_VALLOC))
604 {
605 Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
606 &address,
607 0,
608 (PULONG)&totalSize,
609 MEM_RESERVE | MEM_COMMIT,
610 PAGE_EXECUTE_READWRITE);
611 if (!NT_SUCCESS(Status))
612 {
613 WARN("Could not VirtualAlloc %08lx bytes\n",
614 totalSize );
615 return NULL;
616 }
617 }
618
619 /* Initialize subheap */
620
621 if (!HEAP_InitSubHeap( heap? heap : (HEAP *)address,
622 address, flags, commitSize, totalSize ))
623 {
624 if (!(flags & HEAP_NO_VALLOC))
625 {
626 ULONG dummySize = 0;
627 ZwFreeVirtualMemory(NtCurrentProcess(),
&address,
&dummySize,
MEM_RELEASE);
}
return NULL;
633 }
634
635 return (SUBHEAP *)address;
636 }
637
638
639 /***********************************************************************
640 * HEAP_FindFreeBlock
641 *
642 * Find a free block at least as large as the requested size, and make sure
643 * the requested size is committed.
644 */
645 static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, DWORD size,
646 SUBHEAP **ppSubHeap )
647 {
648 SUBHEAP *subheap;
649 ARENA_FREE *pArena;
650 FREE_LIST_ENTRY *pEntry = heap->freeList;
651
652 /* Find a suitable free list, and in it find a block large enough */
653
654 while (pEntry->size < size) pEntry++;
655 pArena = pEntry->arena.next;
656 while (pArena != &heap->freeList[0].arena)
657 {
658 if (pArena->size > size)
659 {
660 subheap = HEAP_FindSubHeap( heap, pArena );
661 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
662 + size + HEAP_MIN_BLOCK_SIZE,
663 heap->flags))
664 return NULL;
665 *ppSubHeap = subheap;
666 return pArena;
667 }
668
669 pArena = pArena->next;
670 }
671
672 /* If no block was found, attempt to grow the heap */
673
674 if (!(heap->flags & HEAP_GROWABLE))
675 {
676 WARN("Not enough space in heap %08lx for %08lx bytes\n",
677 (DWORD)heap, size );
678 return NULL;
679 }
680 size += sizeof(SUBHEAP) + sizeof(ARENA_FREE);
681 if (!(subheap = HEAP_CreateSubHeap( NULL, heap, heap->flags, size,
682 max( HEAP_DEF_SIZE, size ) )))
683 return NULL;
684
685 TRACE("created new sub-heap %08lx of %08lx bytes for heap %08lx\n",
686 (DWORD)subheap, size, (DWORD)heap );
687
688 *ppSubHeap = subheap;
689 return (ARENA_FREE *)(subheap + 1);
690 }
691
692
693 /***********************************************************************
694 * HEAP_IsValidArenaPtr
695 *
696 * Check that the pointer is inside the range possible for arenas.
697 */
698 static BOOL HEAP_IsValidArenaPtr( HEAP *heap, void *ptr )
699 {
700 int i;
701 SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
702 if (!subheap) return FALSE;
703 if ((char *)ptr >= (char *)subheap + subheap->headerSize) return TRUE;
704 if (subheap != &heap->subheap) return FALSE;
705 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
706 if (ptr == (void *)&heap->freeList[i].arena) return TRUE;
707 return FALSE;
708 }
709
710
711 /***********************************************************************
712 * HEAP_ValidateFreeArena
713 */
714 static BOOL HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
715 {
716 char *heapEnd = (char *)subheap + subheap->size;
717
718 /* Check magic number */
719 if (pArena->magic != ARENA_FREE_MAGIC)
720 {
721 ERR("Heap %08lx: invalid free arena magic for %08lx\n",
722 (DWORD)subheap->heap, (DWORD)pArena );
723 return FALSE;
724 }
725 /* Check size flags */
726 if (!(pArena->size & ARENA_FLAG_FREE) ||
727 (pArena->size & ARENA_FLAG_PREV_FREE))
728 {
729 ERR("Heap %08lx: bad flags %lx for free arena %08lx\n",
730 (DWORD)subheap->heap, pArena->size & ~ARENA_SIZE_MASK, (DWORD)pArena );
731 }
732 /* Check arena size */
733 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
734 {
735 ERR("Heap %08lx: bad size %08lx for free arena %08lx\n",
736 (DWORD)subheap->heap, (DWORD)pArena->size & ARENA_SIZE_MASK, (DWORD)pArena );
737 return FALSE;
738 }
739 /* Check that next pointer is valid */
740 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->next ))
741 {
742 ERR("Heap %08lx: bad next ptr %08lx for arena %08lx\n",
743 (DWORD)subheap->heap, (DWORD)pArena->next, (DWORD)pArena );
744 return FALSE;
745 }
746 /* Check that next arena is free */
747 if (!(pArena->next->size & ARENA_FLAG_FREE) ||
748 (pArena->next->magic != ARENA_FREE_MAGIC))
749 {
750 ERR("Heap %08lx: next arena %08lx invalid for %08lx\n",
751 (DWORD)subheap->heap, (DWORD)pArena->next, (DWORD)pArena );
752 return FALSE;
753 }
754 /* Check that prev pointer is valid */
755 if (!HEAP_IsValidArenaPtr( subheap->heap, pArena->prev ))
756 {
757 ERR("Heap %08lx: bad prev ptr %08lx for arena %08lx\n",
758 (DWORD)subheap->heap, (DWORD)pArena->prev, (DWORD)pArena );
759 return FALSE;
760 }
761 /* Check that prev arena is free */
762 if (!(pArena->prev->size & ARENA_FLAG_FREE) ||
763 (pArena->prev->magic != ARENA_FREE_MAGIC))
764 {
765 ERR("Heap %08lx: prev arena %08lx invalid for %08lx\n",
766 (DWORD)subheap->heap, (DWORD)pArena->prev, (DWORD)pArena );
767 return FALSE;
768 }
769 /* Check that next block has PREV_FREE flag */
770 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
771 {
772 if (!(*(DWORD *)((char *)(pArena + 1) +
773 (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
774 {
775 ERR("Heap %08lx: free arena %08lx next block has no PREV_FREE flag\n",
776 (DWORD)subheap->heap, (DWORD)pArena );
777 return FALSE;
778 }
779 /* Check next block back pointer */
780 if (*((ARENA_FREE **)((char *)(pArena + 1) +
781 (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
782 {
783 ERR("Heap %08lx: arena %08lx has wrong back ptr %08lx\n",
784 (DWORD)subheap->heap, (DWORD)pArena,
785 *((DWORD *)((char *)(pArena+1)+ (pArena->size & ARENA_SIZE_MASK)) - 1));
786 return FALSE;
787 }
788 }
789 return TRUE;
790 }
791
792
793 /***********************************************************************
794 * HEAP_ValidateInUseArena
795 */
796 static BOOL HEAP_ValidateInUseArena( SUBHEAP *subheap, ARENA_INUSE *pArena, BOOL quiet )
797 {
798 char *heapEnd = (char *)subheap + subheap->size;
799
800 /* Check magic number */
801 if (pArena->magic != ARENA_INUSE_MAGIC)
802 {
803 if (quiet == NOISY) {
804 ERR("Heap %08lx: invalid in-use arena magic for %08lx\n",
805 (DWORD)subheap->heap, (DWORD)pArena );
806 if (TRACE_ON(heap))
807 HEAP_Dump( subheap->heap );
808 } else if (WARN_ON(heap)) {
809 WARN("Heap %08lx: invalid in-use arena magic for %08lx\n",
810 (DWORD)subheap->heap, (DWORD)pArena );
811 if (TRACE_ON(heap))
812 HEAP_Dump( subheap->heap );
813 }
814 return FALSE;
815 }
816 /* Check size flags */
817 if (pArena->size & ARENA_FLAG_FREE)
818 {
819 ERR("Heap %08lx: bad flags %lx for in-use arena %08lx\n",
820 (DWORD)subheap->heap, pArena->size & ~ARENA_SIZE_MASK, (DWORD)pArena );
821 }
822 /* Check arena size */
823 if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
824 {
825 ERR("Heap %08lx: bad size %08lx for in-use arena %08lx\n",
826 (DWORD)subheap->heap, (DWORD)pArena->size & ARENA_SIZE_MASK, (DWORD)pArena );
827 return FALSE;
828 }
829 /* Check next arena PREV_FREE flag */
830 if (((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
831 (*(DWORD *)((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
832 {
833 ERR("Heap %08lx: in-use arena %08lx next block has PREV_FREE flag\n",
834 (DWORD)subheap->heap, (DWORD)pArena );
835 return FALSE;
836 }
837 /* Check prev free arena */
838 if (pArena->size & ARENA_FLAG_PREV_FREE)
839 {
840 ARENA_FREE *pPrev = *((ARENA_FREE **)pArena - 1);
841 /* Check prev pointer */
842 if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
843 {
844 ERR("Heap %08lx: bad back ptr %08lx for arena %08lx\n",
845 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
846 return FALSE;
847 }
848 /* Check that prev arena is free */
849 if (!(pPrev->size & ARENA_FLAG_FREE) ||
850 (pPrev->magic != ARENA_FREE_MAGIC))
851 {
852 ERR("Heap %08lx: prev arena %08lx invalid for in-use %08lx\n",
853 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
854 return FALSE;
855 }
856 /* Check that prev arena is really the previous block */
857 if ((char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (char *)pArena)
858 {
859 ERR("Heap %08lx: prev arena %08lx is not prev for in-use %08lx\n",
860 (DWORD)subheap->heap, (DWORD)pPrev, (DWORD)pArena );
861 return FALSE;
862 }
863 }
864 return TRUE;
865 }
866
867
868 /***********************************************************************
869 * HEAP_IsInsideHeap
870 * Checks whether the pointer points to a block inside a given heap.
871 *
872 * NOTES
873 * Should this return BOOL32?
874 *
875 * RETURNS
876 * !0: Success
877 * 0: Failure
878 */
879 int HEAP_IsInsideHeap(
880 HANDLE heap, /* [in] Heap */
881 DWORD flags, /* [in] Flags */
882 LPCVOID ptr /* [in] Pointer */
883 ) {
884 HEAP *heapPtr = HEAP_GetPtr( heap );
885 SUBHEAP *subheap;
886 int ret;
887
888 /* Validate the parameters */
889
890 if (!heapPtr) return 0;
891 flags |= heapPtr->flags;
892 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
893 ret = (((subheap = HEAP_FindSubHeap( heapPtr, ptr )) != NULL) &&
894 (((char *)ptr >= (char *)subheap + subheap->headerSize
895 + sizeof(ARENA_INUSE))));
896 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
897 return ret;
898 }
899
900
901
902
903 /***********************************************************************
904 * HEAP_IsRealArena [Internal]
* Checks that a block is a valid heap arena.
906 *
907 * RETURNS
908 * TRUE: Success
909 * FALSE: Failure
910 */
911 static BOOL HEAP_IsRealArena(
912 HANDLE heap, /* [in] Handle to the heap */
913 DWORD flags, /* [in] Bit flags that control access during operation */
914 LPCVOID block, /* [in] Optional pointer to memory block to validate */
915 BOOL quiet /* [in] Flag - if true, HEAP_ValidateInUseArena
916 * does not complain */
917 ) {
918 SUBHEAP *subheap;
919 HEAP *heapPtr = (HEAP *)(heap);
920 BOOL ret = TRUE;
921
922 if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
923 {
924 ERR("Invalid heap %08x!\n", heap );
925 return FALSE;
926 }
927
928 flags &= HEAP_NO_SERIALIZE;
929 flags |= heapPtr->flags;
930 /* calling HeapLock may result in infinite recursion, so do the critsect directly */
931 if (!(flags & HEAP_NO_SERIALIZE))
932 RtlEnterCriticalSection( &heapPtr->critSection );
933
934 if (block)
935 {
936 /* Only check this single memory block */
937
938 /* The following code is really HEAP_IsInsideHeap *
939 * with serialization already done. */
940 if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
941 ((char *)block < (char *)subheap + subheap->headerSize
942 + sizeof(ARENA_INUSE)))
943 {
944 if (quiet == NOISY)
945 {
946 ERR("Heap %08lx: block %08lx is not inside heap\n",
947 (DWORD)heap, (DWORD)block );
948 }
949 else if (WARN_ON(heap))
950 {
951 WARN("Heap %08lx: block %08lx is not inside heap\n",
952 (DWORD)heap, (DWORD)block );
953 }
954 ret = FALSE;
955 } else
956 ret = HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)block - 1, quiet );
957
958 if (!(flags & HEAP_NO_SERIALIZE))
959 RtlLeaveCriticalSection( &heapPtr->critSection );
960 return ret;
961 }
962
963 subheap = &heapPtr->subheap;
964 while (subheap && ret)
965 {
966 char *ptr = (char *)subheap + subheap->headerSize;
967 while (ptr < (char *)subheap + subheap->size)
968 {
969 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
970 {
971 if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr )) {
972 ret = FALSE;
973 break;
974 }
975 ptr += sizeof(ARENA_FREE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
976 }
977 else
978 {
979 if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY )) {
980 ret = FALSE;
981 break;
982 }
983 ptr += sizeof(ARENA_INUSE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
984 }
985 }
986 subheap = subheap->next;
987 }
988
989 if (!(flags & HEAP_NO_SERIALIZE))
990 RtlLeaveCriticalSection( &heapPtr->critSection );
991 return ret;
992 }
993
994
995 /***********************************************************************
996 * HeapCreate (KERNEL32.336)
997 * RETURNS
998 * Handle of heap: Success
999 * NULL: Failure
1000 */
1001 HANDLE STDCALL
1002 RtlCreateHeap(ULONG flags,
1003 PVOID BaseAddress,
1004 ULONG initialSize,
1005 ULONG maxSize,
1006 PVOID Unknown,
1007 PRTL_HEAP_DEFINITION Definition)
1008 {
1009 SUBHEAP *subheap;
1010 ULONG i;
1011
1012 /* Allocate the heap block */
1013
1014 if (!maxSize)
1015 {
1016 maxSize = HEAP_DEF_SIZE;
1017 flags |= HEAP_GROWABLE;
1018 }
1019 if (!(subheap = HEAP_CreateSubHeap( BaseAddress, NULL, flags, initialSize, maxSize )))
1020 {
1021 return 0;
1022 }
1023
1024 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1025 for (i = 0; i < NtCurrentPeb ()->NumberOfHeaps; i++)
1026 {
1027 if (NtCurrentPeb ()->ProcessHeaps[i] == NULL)
1028 {
1029 NtCurrentPeb()->ProcessHeaps[i] = (PVOID)subheap;
1030 break;
1031 }
1032 }
1033 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1034
1035 return (HANDLE)subheap;
1036 }
1037
1038 /***********************************************************************
1039 * HeapDestroy (KERNEL32.337)
1040 * RETURNS
1041 * TRUE: Success
1042 * FALSE: Failure
1043 */
1044 BOOL STDCALL
1045 RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
1046 {
1047 HEAP *heapPtr = HEAP_GetPtr( heap );
1048 SUBHEAP *subheap;
1049 ULONG i, flags;
1050
1051 TRACE("%08x\n", heap );
1052 if (!heapPtr) return FALSE;
1053
1054 RtlEnterCriticalSection (&RtlpProcessHeapsListLock);
1055 for (i = 0; i < NtCurrentPeb ()->NumberOfHeaps; i++)
1056 {
1057 if (NtCurrentPeb ()->ProcessHeaps[i] == heap)
1058 {
1059 NtCurrentPeb()->ProcessHeaps[i] = NULL;
1060 break;
1061 }
1062 }
1063 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1064
1065 RtlDeleteCriticalSection( &heapPtr->critSection );
1066 subheap = &heapPtr->subheap;
// We must save the flags. The first subheap is located after
// the heap structure, so releasing the first subheap also
// releases the heap structure itself.
1070 flags = heapPtr->flags;
1071 while (subheap)
1072 {
1073 SUBHEAP *next = subheap->next;
1074
1075 if (!(flags & HEAP_NO_VALLOC))
1076 {
1077 ULONG dummySize = 0;
1078 ZwFreeVirtualMemory(NtCurrentProcess(),
1079 (PVOID*)&subheap,
1080 &dummySize,
1081 MEM_RELEASE);
1082 }
1083 subheap = next;
1084 }
1085 return TRUE;
1086 }
1087
1088
1089 /***********************************************************************
1090 * HeapAlloc (KERNEL32.334)
1091 * RETURNS
1092 * Pointer to allocated memory block
1093 * NULL: Failure
1094 */
1095 PVOID STDCALL
1096 RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
1097 ULONG flags, /* [in] Heap allocation control flags */
1098 ULONG size) /* [in] Number of bytes to allocate */
1099 {
1100 ARENA_FREE *pArena;
1101 ARENA_INUSE *pInUse;
1102 SUBHEAP *subheap;
1103 HEAP *heapPtr = HEAP_GetPtr( heap );
1104
1105 /* Validate the parameters */
1106
1107 if (!heapPtr) return NULL;
1108 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
1109 flags |= heapPtr->flags;
1110 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1111 size = (size + 3) & ~3;
1112 if (size < HEAP_MIN_BLOCK_SIZE) size = HEAP_MIN_BLOCK_SIZE;
1113
1114 /* Locate a suitable free block */
1115
1116 if (!(pArena = HEAP_FindFreeBlock( heapPtr, size, &subheap )))
1117 {
1118 TRACE("(%08x,%08lx,%08lx): returning NULL\n",
1119 heap, flags, size );
1120 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1121 return NULL;
1122 }
1123
1124 /* Remove the arena from the free list */
1125
1126 pArena->next->prev = pArena->prev;
1127 pArena->prev->next = pArena->next;
1128
1129 /* Build the in-use arena */
1130
1131 pInUse = (ARENA_INUSE *)pArena;
1132 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1133 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1134 pInUse->callerEIP = GET_EIP();
1135 pInUse->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1136 pInUse->magic = ARENA_INUSE_MAGIC;
1137
1138 /* Shrink the block */
1139
1140 HEAP_ShrinkBlock( subheap, pInUse, size );
1141
1142 if (flags & HEAP_ZERO_MEMORY)
1143 memset( pInUse + 1, 0, pInUse->size & ARENA_SIZE_MASK );
1144 else if (TRACE_ON(heap))
1145 memset( pInUse + 1, ARENA_INUSE_FILLER, pInUse->size & ARENA_SIZE_MASK );
1146
1147 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1148
1149 TRACE("(%08x,%08lx,%08lx): returning %08lx\n",
1150 heap, flags, size, (DWORD)(pInUse + 1) );
1151 return (LPVOID)(pInUse + 1);
1152 }
1153
1154
1155 /***********************************************************************
1156 * HeapFree (KERNEL32.338)
1157 * RETURNS
1158 * TRUE: Success
1159 * FALSE: Failure
1160 */
1161 BOOLEAN STDCALL RtlFreeHeap(
1162 HANDLE heap, /* [in] Handle of heap */
1163 ULONG flags, /* [in] Heap freeing flags */
1164 PVOID ptr /* [in] Address of memory to free */
1165 ) {
1166 ARENA_INUSE *pInUse;
1167 SUBHEAP *subheap;
1168 HEAP *heapPtr = HEAP_GetPtr( heap );
1169
1170 /* Validate the parameters */
1171
1172 if (!heapPtr) return FALSE;
if (!ptr) /* Freeing a NULL ptr doesn't indicate an error in Win2k */
1174 {
1175 WARN("(%08x,%08lx,%08lx): asked to free NULL\n",
1176 heap, flags, (DWORD)ptr );
1177 return TRUE;
1178 }
1179
1180 flags &= HEAP_NO_SERIALIZE;
1181 flags |= heapPtr->flags;
1182 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1183 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1184 {
1185 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1186 TRACE("(%08x,%08lx,%08lx): returning FALSE\n",
1187 heap, flags, (DWORD)ptr );
1188 return FALSE;
1189 }
1190
1191 /* Turn the block into a free block */
1192
1193 pInUse = (ARENA_INUSE *)ptr - 1;
1194 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1195 HEAP_MakeInUseBlockFree( subheap, pInUse, heapPtr->flags );
1196
1197 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1198
1199 TRACE("(%08x,%08lx,%08lx): returning TRUE\n",
1200 heap, flags, (DWORD)ptr );
1201 return TRUE;
1202 }
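/* Minimal usage sketch of the allocation API above, kept under #if 0 like
 * HeapWalk further down. RtlpHeapUsageSketch and the sizes used here are
 * illustration only, not part of the real interface.
 */
#if 0
static VOID RtlpHeapUsageSketch(VOID)
{
   HANDLE heap;
   PVOID block;

   /* Growable private heap: 64Kb initial size, 1Mb maximum. */
   heap = RtlCreateHeap(HEAP_GROWABLE, NULL, 0x10000, 0x100000, NULL, NULL);
   if (heap == NULL)
      return;

   /* Zero-initialized allocation; serialized unless HEAP_NO_SERIALIZE is used. */
   block = RtlAllocateHeap(heap, HEAP_ZERO_MEMORY, 100);
   if (block != NULL)
   {
      memset(block, 0x41, 100);
      RtlFreeHeap(heap, 0, block);
   }

   RtlDestroyHeap(heap);
}
#endif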
1203
1204
1205 /***********************************************************************
1206 * HeapReAlloc (KERNEL32.340)
1207 * RETURNS
1208 * Pointer to reallocated memory block
1209 * NULL: Failure
1210 */
1211 LPVOID STDCALL RtlReAllocateHeap(
1212 HANDLE heap, /* [in] Handle of heap block */
1213 DWORD flags, /* [in] Heap reallocation flags */
1214 LPVOID ptr, /* [in] Address of memory to reallocate */
1215 DWORD size /* [in] Number of bytes to reallocate */
1216 ) {
1217 ARENA_INUSE *pArena;
1218 DWORD oldSize;
1219 HEAP *heapPtr;
1220 SUBHEAP *subheap;
1221
1222 if (!ptr) return RtlAllocateHeap( heap, flags, size ); /* FIXME: correct? */
if (!(heapPtr = HEAP_GetPtr( heap ))) return NULL;
1224
1225 /* Validate the parameters */
1226
1227 flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1228 HEAP_REALLOC_IN_PLACE_ONLY;
1229 flags |= heapPtr->flags;
1230 size = (size + 3) & ~3;
1231 if (size < HEAP_MIN_BLOCK_SIZE) size = HEAP_MIN_BLOCK_SIZE;
1232
1233 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1234 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1235 {
1236 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1237 TRACE("(%08x,%08lx,%08lx,%08lx): returning NULL\n",
1238 heap, flags, (DWORD)ptr, size );
1239 return NULL;
1240 }
1241
1242 /* Check if we need to grow the block */
1243
1244 pArena = (ARENA_INUSE *)ptr - 1;
1245 pArena->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1246
1247 subheap = HEAP_FindSubHeap( heapPtr, pArena );
1248 oldSize = (pArena->size & ARENA_SIZE_MASK);
1249 if (size > oldSize)
1250 {
1251 char *pNext = (char *)(pArena + 1) + oldSize;
1252 if ((pNext < (char *)subheap + subheap->size) &&
1253 (*(DWORD *)pNext & ARENA_FLAG_FREE) &&
1254 (oldSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= size))
1255 {
1256 /* The next block is free and large enough */
1257 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1258 pFree->next->prev = pFree->prev;
1259 pFree->prev->next = pFree->next;
1260 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1261 if (!HEAP_Commit( subheap, (char *)pArena + sizeof(ARENA_INUSE)
1262 + size + HEAP_MIN_BLOCK_SIZE,
1263 heapPtr->flags))
1264 {
1265 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1266 return NULL;
1267 }
1268 HEAP_ShrinkBlock( subheap, pArena, size );
1269 }
1270 else /* Do it the hard way */
1271 {
1272 ARENA_FREE *pNew;
1273 ARENA_INUSE *pInUse;
1274 SUBHEAP *newsubheap;
1275
1276 if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1277 !(pNew = HEAP_FindFreeBlock( heapPtr, size, &newsubheap )))
1278 {
1279 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1280 return NULL;
1281 }
1282
1283 /* Build the in-use arena */
1284
1285 pNew->next->prev = pNew->prev;
1286 pNew->prev->next = pNew->next;
1287 pInUse = (ARENA_INUSE *)pNew;
1288 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1289 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1290 pInUse->threadId = (DWORD)NtCurrentTeb()->Cid.UniqueThread;
1291 pInUse->magic = ARENA_INUSE_MAGIC;
1292 HEAP_ShrinkBlock( newsubheap, pInUse, size );
1293 memcpy( pInUse + 1, pArena + 1, oldSize );
1294
1295 /* Free the previous block */
1296
1297 HEAP_MakeInUseBlockFree( subheap, pArena, flags );
1298 subheap = newsubheap;
1299 pArena = pInUse;
1300 }
1301 }
1302 else HEAP_ShrinkBlock( subheap, pArena, size ); /* Shrink the block */
1303
1304 /* Clear the extra bytes if needed */
1305
1306 if (size > oldSize)
1307 {
1308 if (flags & HEAP_ZERO_MEMORY)
1309 memset( (char *)(pArena + 1) + oldSize, 0,
1310 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1311 else if (TRACE_ON(heap))
1312 memset( (char *)(pArena + 1) + oldSize, ARENA_INUSE_FILLER,
1313 (pArena->size & ARENA_SIZE_MASK) - oldSize );
1314 }
1315
1316 /* Return the new arena */
1317
1318 pArena->callerEIP = GET_EIP();
1319 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1320
1321 TRACE("(%08x,%08lx,%08lx,%08lx): returning %08lx\n",
1322 heap, flags, (DWORD)ptr, size, (DWORD)(pArena + 1) );
1323 return (LPVOID)(pArena + 1);
1324 }
1325
1326
1327 /***********************************************************************
1328 * HeapCompact (KERNEL32.335)
1329 */
1330 DWORD STDCALL RtlCompactHeap( HANDLE heap, DWORD flags )
1331 {
1332 SetLastError(ERROR_CALL_NOT_IMPLEMENTED);
1333 return 0;
1334 }
1335
1336
1337 /***********************************************************************
1338 * HeapLock (KERNEL32.339)
1339 * Attempts to acquire the critical section object for a specified heap.
1340 *
1341 * RETURNS
1342 * TRUE: Success
1343 * FALSE: Failure
1344 */
1345 BOOL STDCALL RtlLockHeap(
1346 HANDLE heap /* [in] Handle of heap to lock for exclusive access */
1347 ) {
1348 HEAP *heapPtr = HEAP_GetPtr( heap );
1349 if (!heapPtr) return FALSE;
1350 RtlEnterCriticalSection( &heapPtr->critSection );
1351 return TRUE;
1352 }
1353
1354
1355 /***********************************************************************
1356 * HeapUnlock (KERNEL32.342)
1357 * Releases ownership of the critical section object.
1358 *
1359 * RETURNS
1360 * TRUE: Success
1361 * FALSE: Failure
1362 */
1363 BOOL STDCALL RtlUnlockHeap(
1364 HANDLE heap /* [in] Handle to the heap to unlock */
1365 ) {
1366 HEAP *heapPtr = HEAP_GetPtr( heap );
1367 if (!heapPtr) return FALSE;
1368 RtlLeaveCriticalSection( &heapPtr->critSection );
1369 return TRUE;
1370 }
1371
1372
1373 /***********************************************************************
1374 * HeapSize (KERNEL32.341)
1375 * RETURNS
1376 * Size in bytes of allocated memory
1377 * 0xffffffff: Failure
1378 */
1379 DWORD STDCALL RtlSizeHeap(
1380 HANDLE heap, /* [in] Handle of heap */
1381 DWORD flags, /* [in] Heap size control flags */
1382 LPVOID ptr /* [in] Address of memory to return size for */
1383 ) {
1384 DWORD ret;
1385 HEAP *heapPtr = HEAP_GetPtr( heap );
1386
if (!heapPtr) return 0xffffffff;
1388 flags &= HEAP_NO_SERIALIZE;
1389 flags |= heapPtr->flags;
1390 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1391 if (!HEAP_IsRealArena( heap, HEAP_NO_SERIALIZE, ptr, QUIET ))
1392 {
1393 SetLastError( ERROR_INVALID_PARAMETER );
1394 ret = 0xffffffff;
1395 }
1396 else
1397 {
1398 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr - 1;
1399 ret = pArena->size & ARENA_SIZE_MASK;
1400 }
1401 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1402
1403 TRACE("(%08x,%08lx,%08lx): returning %08lx\n",
1404 heap, flags, (DWORD)ptr, ret );
1405 return ret;
1406 }
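/* Note: the size reported above is the arena payload size, i.e. the original
 * request rounded up by RtlAllocateHeap (4-byte alignment, HEAP_MIN_BLOCK_SIZE
 * floor, plus any tail that HEAP_ShrinkBlock could not split off), so it may
 * be larger than the number of bytes originally asked for.
 */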
1407
1408
1409 /***********************************************************************
1410 * HeapValidate (KERNEL32.343)
1411 * Validates a specified heap.
1412 *
1413 * NOTES
1414 * Flags is ignored.
1415 *
1416 * RETURNS
1417 * TRUE: Success
1418 * FALSE: Failure
1419 */
1420 BOOL STDCALL RtlValidateHeap(
1421 HANDLE heap, /* [in] Handle to the heap */
1422 DWORD flags, /* [in] Bit flags that control access during operation */
1423 PVOID block /* [in] Optional pointer to memory block to validate */
1424 ) {
1425
1426 return HEAP_IsRealArena( heap, flags, block, QUIET );
1427 }
1428
1429
1430 /***********************************************************************
1431 * HeapWalk (KERNEL32.344)
1432 * Enumerates the memory blocks in a specified heap.
1433 * See HEAP_Dump() for info on heap structure.
1434 *
1435 * TODO
1436 * - handling of PROCESS_HEAP_ENTRY_MOVEABLE and
1437 * PROCESS_HEAP_ENTRY_DDESHARE (needs heap.c support)
1438 *
1439 * RETURNS
1440 * TRUE: Success
1441 * FALSE: Failure
1442 */
1443 #if 0
1444 BOOL STDCALL HeapWalk(
1445 HANDLE heap, /* [in] Handle to heap to enumerate */
1446 LPPROCESS_HEAP_ENTRY entry /* [out] Pointer to structure of enumeration info */
1447 ) {
1448 HEAP *heapPtr = HEAP_GetPtr(heap);
1449 SUBHEAP *sub, *currentheap = NULL;
1450 BOOL ret = FALSE;
1451 char *ptr;
1452 int region_index = 0;
1453
1454 if (!heapPtr || !entry)
1455 {
1456 SetLastError(ERROR_INVALID_PARAMETER);
1457 return FALSE;
1458 }
1459
1460 if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlEnterCriticalSection( &heapPtr->critSection );
1461
1462 /* set ptr to the next arena to be examined */
1463
1464 if (!entry->lpData) /* first call (init) ? */
1465 {
1466 TRACE("begin walking of heap 0x%08x.\n", heap);
1467 /*HEAP_Dump(heapPtr);*/
1468 currentheap = &heapPtr->subheap;
1469 ptr = (char*)currentheap + currentheap->headerSize;
1470 }
1471 else
1472 {
1473 ptr = entry->lpData;
1474 sub = &heapPtr->subheap;
1475 while (sub)
1476 {
1477 if (((char *)ptr >= (char *)sub) &&
1478 ((char *)ptr < (char *)sub + sub->size))
1479 {
1480 currentheap = sub;
1481 break;
1482 }
1483 sub = sub->next;
1484 region_index++;
1485 }
1486 if (currentheap == NULL)
1487 {
1488 ERR("no matching subheap found, shouldn't happen !\n");
1489 SetLastError(ERROR_NO_MORE_ITEMS);
1490 goto HW_end;
1491 }
1492
1493 ptr += entry->cbData; /* point to next arena */
1494 if (ptr > (char *)currentheap + currentheap->size - 1)
1495 { /* proceed with next subheap */
1496 if (!(currentheap = currentheap->next))
1497 { /* successfully finished */
1498 TRACE("end reached.\n");
1499 SetLastError(ERROR_NO_MORE_ITEMS);
1500 goto HW_end;
1501 }
1502 ptr = (char*)currentheap + currentheap->headerSize;
1503 }
1504 }
1505
1506 entry->wFlags = 0;
1507 if (*(DWORD *)ptr & ARENA_FLAG_FREE)
1508 {
1509 ARENA_FREE *pArena = (ARENA_FREE *)ptr;
1510
1511 /*TRACE("free, magic: %04x\n", pArena->magic);*/
1512
1513 entry->lpData = pArena + 1;
1514 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1515 entry->cbOverhead = sizeof(ARENA_FREE);
1516 entry->wFlags = PROCESS_HEAP_UNCOMMITTED_RANGE;
1517 }
1518 else
1519 {
1520 ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
1521
1522 /*TRACE("busy, magic: %04x\n", pArena->magic);*/
1523
1524 entry->lpData = pArena + 1;
1525 entry->cbData = pArena->size & ARENA_SIZE_MASK;
1526 entry->cbOverhead = sizeof(ARENA_INUSE);
1527 entry->wFlags = PROCESS_HEAP_ENTRY_BUSY;
1528 /* FIXME: can't handle PROCESS_HEAP_ENTRY_MOVEABLE
1529 and PROCESS_HEAP_ENTRY_DDESHARE yet */
1530 }
1531
1532 entry->iRegionIndex = region_index;
1533
1534 /* first element of heap ? */
if (ptr == (char *)currentheap + currentheap->headerSize)
{
entry->wFlags |= PROCESS_HEAP_REGION;
entry->Foo.Region.dwCommittedSize = currentheap->commitSize;
entry->Foo.Region.dwUnCommittedSize =
currentheap->size - currentheap->commitSize;
entry->Foo.Region.lpFirstBlock = /* first valid block */
(char *)currentheap + currentheap->headerSize;
entry->Foo.Region.lpLastBlock = /* first invalid block */
(char *)currentheap + currentheap->size;
1545 }
1546 ret = TRUE;
1547
1548 HW_end:
1549 if (!(heapPtr->flags & HEAP_NO_SERIALIZE)) RtlLeaveCriticalSection( &heapPtr->critSection );
1550
1551 return ret;
1552 }
1553 #endif
1554
1555 VOID
1556 RtlInitializeHeapManager(VOID)
1557 {
1558 PPEB Peb;
1559
1560 Peb = NtCurrentPeb();
1561
1562 Peb->NumberOfHeaps = 0;
1563 Peb->MaximumNumberOfHeaps = (PAGE_SIZE - sizeof(PEB)) / sizeof(HANDLE);
Peb->ProcessHeaps = (PVOID)((char *)Peb + sizeof(PEB));
1565
1566 RtlInitializeCriticalSection(&RtlpProcessHeapsListLock);
1567 }
1568
1569
1570 NTSTATUS STDCALL
1571 RtlEnumProcessHeaps(DWORD STDCALL(*func)(void*,LONG),
1572 LONG lParam)
1573 {
1574 NTSTATUS Status = STATUS_SUCCESS;
1575 ULONG i;
1576
1577 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1578
1579 for (i = 0; i < NtCurrentPeb()->NumberOfHeaps; i++)
1580 {
1581 Status = func(NtCurrentPeb()->ProcessHeaps[i],lParam);
1582 if (!NT_SUCCESS(Status))
1583 break;
1584 }
1585
1586 RtlLeaveCriticalSection(&RtlpProcessHeapsListLock);
1587
1588 return Status;
1589 }
1590
1591
1592 ULONG STDCALL
1593 RtlGetProcessHeaps(ULONG HeapCount,
1594 HANDLE *HeapArray)
1595 {
1596 ULONG Result = 0;
1597
1598 RtlEnterCriticalSection(&RtlpProcessHeapsListLock);
1599
1600 if (NtCurrentPeb()->NumberOfHeaps <= HeapCount)
1601 {
1602 Result = NtCurrentPeb()->NumberOfHeaps;
1603 memmove(HeapArray,
1604 NtCurrentPeb()->ProcessHeaps,
1605 Result * sizeof(HANDLE));
1606 }
1607
1608 RtlLeaveCriticalSection (&RtlpProcessHeapsListLock);
1609
1610 return Result;
1611 }
1612
1613
1614 BOOLEAN STDCALL
1615 RtlValidateProcessHeaps(VOID)
1616 {
1617 HANDLE Heaps[128];
1618 BOOLEAN Result = TRUE;
1619 ULONG HeapCount;
1620 ULONG i;
1621
1622 HeapCount = RtlGetProcessHeaps(128, Heaps);
1623 for (i = 0; i < HeapCount; i++)
1624 {
1625 if (!RtlValidateHeap(Heaps[i], 0, NULL))
1626 Result = FALSE;
1627 }
1628
1629 return Result;
1630 }
1631
1632 /* EOF */