[HEAP]
[reactos.git] / reactos / lib / rtl / heap.c
1 /* COPYRIGHT: See COPYING in the top level directory
2 * PROJECT: ReactOS system libraries
 * FILE:            lib/rtl/heap.c
 * PURPOSE:         Heap management functions
5 * PROGRAMMERS: Copyright 1996 Alexandre Julliard
6 * Copyright 1998 Ulrich Weigand
7 */
8
9 //
10 // Note: This is a slightly modified implementation of WINE's.
11 //
12 // WINE's implementation is a hack based on Windows 95's heap implementation,
// itself a hack of DOS memory management. It supports 3 out of the 18 possible
14 // NT Heap Flags, does not support custom allocation/deallocation routines,
15 // and is about 50-80x slower with fragmentation rates up to 500x higher when
16 // compared to NT's LFH. WINE is lucky because the advanced NT Heap features are
17 // used in kernel-mode usually, not in user-mode, and they are crossing their
18 // fingers for this being the same. Note that several high-end SQL/Database
19 // applications would significantly benefit from custom heap features provided
20 // by NT.
21 //
22 // ROS's changes include:
23 // - Using Zw instead of Nt calls, because this matters when in Kernel Mode
24 // - Not using per-process heap lists while in Kernel Mode
25 // - Using a macro to handle the Critical Section, because it's meaningless
26 // in Kernel Mode.
27 // - Crappy support for a custom Commit routine.
28 // - Crappy support for User-defined flags and the User-defined value.
29 // - Ripping out all the code for shared heaps, because those don't exist on NT.
30 //
31 // Be aware of these changes when you try to sync something back.
32 //
33
34 /* INCLUDES *****************************************************************/
35
36 #include <rtl.h>
37 #undef LIST_FOR_EACH
38 #undef LIST_FOR_EACH_SAFE
39 #include <wine/list.h>
40
41 #define NDEBUG
42 #include <debug.h>
43
44 #define TRACE DPRINT
45 #define WARN DPRINT1
46 #define ERR DPRINT1
47 #define DPRINTF DPRINT
48
49 /* FUNCTIONS *****************************************************************/
50
51 #define WARN_ON(x) (1)
52
53 #ifdef NDEBUG
54 #define TRACE_ON(x) (0)
55 #else
56 #define TRACE_ON(x) (1)
57 #endif
58
59 /* Note: the heap data structures are based on what Pietrek describes in his
60 * book 'Windows 95 System Programming Secrets'. The layout is not exactly
61 * the same, but could be easily adapted if it turns out some programs
62 * require it.
63 */
64
65 /* FIXME: use SIZE_T for 'size' structure members, but we need to make sure
66 * that there is no unaligned accesses to structure fields.
67 */
68
/* Header that precedes every allocated (in-use) block.
 * The three bit-fields pack into a single 32-bit DWORD (23+1+8 = 32). */
typedef struct tagARENA_INUSE
{
    SIZE_T size;             /* Block size; must be the first field */
    DWORD magic : 23;        /* Magic number */
    DWORD has_user_data : 1; /* There is user data associated with this block */
    DWORD unused_bytes : 8;  /* Number of bytes in the block not used by user data (max value is HEAP_MIN_DATA_SIZE+HEAP_MIN_SHRINK_SIZE) */
} ARENA_INUSE;

/* Header that precedes every free block; 'entry' links the block into one
 * of the heap's size-segregated free lists. */
typedef struct tagARENA_FREE
{
    SIZE_T size;       /* Block size; must be the first field */
    DWORD magic;       /* Magic number */
    struct list entry; /* Entry in free list */
} ARENA_FREE;

/* The low bits of 'size' hold flags; the size itself is always aligned. */
#define ARENA_FLAG_FREE        0x00000001  /* flags OR'ed with arena size */
#define ARENA_FLAG_PREV_FREE   0x00000002
#define ARENA_INUSE_MAGIC      0x455355    /* Value for arena 'magic' field */
#define ARENA_FREE_MAGIC       0x45455246  /* Value for arena 'magic' field */

/* Mask that strips the flag bits out of an arena 'size' field. */
#ifndef _WIN64
#define ARENA_SIZE_MASK        (~3L)
#else
#define ARENA_SIZE_MASK        (~7L)
#endif

/* Fill patterns written when heap tracing is enabled. */
#define ARENA_INUSE_FILLER     0x55
#define ARENA_FREE_FILLER      0xaa

/* everything is aligned on 8 byte boundaries (16 for Win64) */
#define ALIGNMENT              (2*sizeof(void*))
#define ARENA_OFFSET           (ALIGNMENT - sizeof(ARENA_INUSE))

/* Round a size up so that the user data following an ARENA_INUSE header
 * stays ALIGNMENT-aligned. */
#define ROUND_SIZE(size)       ((((size) + ALIGNMENT - 1) & ~(ALIGNMENT-1)) + ARENA_OFFSET)


#define QUIET                  1  /* Suppress messages */
#define NOISY                  0  /* Report all errors */

/* minimum data size (without arenas) of an allocated block */
#define HEAP_MIN_DATA_SIZE     ROUND_SIZE(2 * sizeof(struct list))
/* minimum size that must remain to shrink an allocated block */
#define HEAP_MIN_SHRINK_SIZE   (HEAP_MIN_DATA_SIZE+sizeof(ARENA_FREE))

#define HEAP_NB_FREE_LISTS     5  /* Number of free lists */

/* Max size of the blocks on the free lists */
static const DWORD HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
{
    0x10, 0x20, 0x80, 0x200, MAXULONG
};

/* Dummy free-list head embedded in the HEAP structure; the pointer array
 * pads each entry to preserve arena alignment.
 * NOTE(review): field name 'aligment' is a historical typo; kept as-is in
 * case other translation units reference it. */
typedef union
{
    ARENA_FREE arena;
    void *aligment[4];
} FREE_LIST_ENTRY;
126
struct tagHEAP;

/* One contiguous region of the heap. The first sub-heap is embedded in the
 * HEAP structure itself; additional ones are chained through 'next'. */
typedef struct tagSUBHEAP
{
    SIZE_T size;             /* Size of the whole sub-heap */
    SIZE_T commitSize;       /* Committed size of the sub-heap */
    DWORD headerSize;        /* Size of the heap header */
    struct tagSUBHEAP *next; /* Next sub-heap */
    struct tagHEAP *heap;    /* Main heap structure */
    DWORD magic;             /* Magic number */
} SUBHEAP;

#define SUBHEAP_MAGIC ((DWORD)('S' | ('U'<<8) | ('B'<<16) | ('H'<<24)))

/* Per-allocation user flags/value, looked up by block base address
 * (ROS addition; presumably backs the user-flags/user-value Rtl heap APIs
 * — confirm against callers). */
typedef struct tagHEAP_USER_DATA
{
    LIST_ENTRY ListEntry;  /* Entry in the heap's UserDataHead list */
    PVOID BaseAddress;     /* User pointer of the block this entry describes */
    ULONG UserFlags;
    PVOID UserValue;
} HEAP_USER_DATA, *PHEAP_USER_DATA;

/* Main heap control structure; doubles as the first sub-heap. */
typedef struct tagHEAP
{
    SUBHEAP subheap;   /* First sub-heap */
    struct list entry; /* Entry in process heap list */
    HEAP_LOCK lock;    /* Critical section for serialization */
    DECLSPEC_ALIGN(8) FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
    DWORD flags;       /* Heap flags */
    DWORD magic;       /* Magic number */
    PRTL_HEAP_COMMIT_ROUTINE commitRoutine; /* Optional custom commit routine (ROS addition) */
    LIST_ENTRY UserDataHead; /* Head of the HEAP_USER_DATA list */
} HEAP;

#define HEAP_MAGIC ((DWORD)('H' | ('E'<<8) | ('A'<<16) | ('P'<<24)))

#define HEAP_DEF_SIZE 0x110000 /* Default heap size = 1Mb + 64Kb */
#define COMMIT_MASK 0xffff     /* bitmask for commit/decommit granularity */

static HEAP *processHeap; /* main process heap */

static BOOL HEAP_IsRealArena( HEAP *heapPtr, DWORD flags, LPCVOID block, BOOL quiet );
169
/* mark a block of memory as free for debugging purposes */
static __inline void mark_block_free( void *ptr, SIZE_T size )
{
    /* Only scribble the free-filler pattern when heap tracing is enabled */
    if (TRACE_ON(heap)) memset( ptr, ARENA_FREE_FILLER, size );
#ifdef VALGRIND_MAKE_NOACCESS
    VALGRIND_DISCARD( VALGRIND_MAKE_NOACCESS( ptr, size ));
#endif
}
178
/* mark a block of memory as initialized for debugging purposes.
 * No-op unless building with Valgrind client requests available. */
static __inline void mark_block_initialized( void *ptr, SIZE_T size )
{
#ifdef VALGRIND_MAKE_READABLE
    VALGRIND_DISCARD( VALGRIND_MAKE_READABLE( ptr, size ));
#endif
}
186
/* mark a block of memory as uninitialized for debugging purposes */
static __inline void mark_block_uninitialized( void *ptr, SIZE_T size )
{
#ifdef VALGRIND_MAKE_WRITABLE
    VALGRIND_DISCARD( VALGRIND_MAKE_WRITABLE( ptr, size ));
#endif
    if (TRACE_ON(heap))
    {
        /* Scribble the in-use filler so stale reads are recognizable */
        memset( ptr, ARENA_INUSE_FILLER, size );
#ifdef VALGRIND_MAKE_WRITABLE
        /* make it uninitialized to valgrind again (the memset marked it
         * as defined) */
        VALGRIND_DISCARD( VALGRIND_MAKE_WRITABLE( ptr, size ));
#endif
    }
}
202
/* clear contents of a block of memory */
static __inline void clear_block( void *ptr, SIZE_T size )
{
    /* Tell the debug/Valgrind helpers the range is defined before zeroing */
    mark_block_initialized( ptr, size );
    memset( ptr, 0, size );
}
209
210 /* locate a free list entry of the appropriate size */
211 /* size is the size of the whole block including the arena header */
212 static __inline unsigned int get_freelist_index( SIZE_T size )
213 {
214 unsigned int i;
215
216 size -= sizeof(ARENA_FREE);
217 for (i = 0; i < HEAP_NB_FREE_LISTS - 1; i++) if (size <= HEAP_freeListSizes[i]) break;
218 return i;
219 }
220
221 /* get the memory protection type to use for a given heap */
222 static inline ULONG get_protection_type( DWORD flags )
223 {
224 return (flags & HEAP_CREATE_ENABLE_EXECUTE) ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
225 }
226
/* Static debug info for the process heap's critical section; using a static
 * object avoids allocating from the heap we are initializing. The
 * CriticalSection back-pointer is filled in by HEAP_InitSubHeap. */
static RTL_CRITICAL_SECTION_DEBUG process_heap_critsect_debug =
{
    0, 0, NULL, /* will be set later */
    { &process_heap_critsect_debug.ProcessLocksList, &process_heap_critsect_debug.ProcessLocksList },
    0, 0, 0, 0, 0
};
233
/***********************************************************************
 * HEAP_Dump
 *
 * Print the heap layout (sub-heap chain, free lists, and every arena in
 * every sub-heap) to the debug output. Debugging aid only; assumes the
 * heap structures are consistent enough to walk.
 */
static void HEAP_Dump( HEAP *heap )
{
    int i;
    SUBHEAP *subheap;
    char *ptr;

    DPRINTF( "Heap: %p\n", heap );
    DPRINTF( "Next: %p Sub-heaps: %p",
             LIST_ENTRY( heap->entry.next, HEAP, entry ), &heap->subheap );
    subheap = &heap->subheap;
    while (subheap->next)
    {
        DPRINTF( " -> %p", subheap->next );
        subheap = subheap->next;
    }

    DPRINTF( "\nFree lists:\n Block Stat Size Id\n" );
    for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
        DPRINTF( "%p free %08lx prev=%p next=%p\n",
                 &heap->freeList[i].arena, HEAP_freeListSizes[i],
                 LIST_ENTRY( heap->freeList[i].arena.entry.prev, ARENA_FREE, entry ),
                 LIST_ENTRY( heap->freeList[i].arena.entry.next, ARENA_FREE, entry ));

    /* Walk every sub-heap and classify each arena by its flag bits */
    subheap = &heap->subheap;
    while (subheap)
    {
        SIZE_T freeSize = 0, usedSize = 0, arenaSize = subheap->headerSize;
        DPRINTF( "\n\nSub-heap %p: size=%08lx committed=%08lx\n",
                 subheap, subheap->size, subheap->commitSize );

        DPRINTF( "\n Block Stat Size Id\n" );
        ptr = (char*)subheap + subheap->headerSize;
        while (ptr < (char *)subheap + subheap->size)
        {
            if (*(DWORD *)ptr & ARENA_FLAG_FREE)
            {
                /* Free arena: linked into one of the free lists */
                ARENA_FREE *pArena = (ARENA_FREE *)ptr;
                DPRINTF( "%p free %08lx prev=%p next=%p\n",
                         pArena, pArena->size & ARENA_SIZE_MASK,
                         LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry ),
                         LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry ) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_FREE);
                freeSize += pArena->size & ARENA_SIZE_MASK;
            }
            else if (*(DWORD *)ptr & ARENA_FLAG_PREV_FREE)
            {
                /* In-use arena whose predecessor is free: a back pointer to
                 * the free arena is stored just before this header */
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                DPRINTF( "%p Used %08lx back=%p\n",
                         pArena, pArena->size & ARENA_SIZE_MASK, *((ARENA_FREE **)pArena - 1) );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
            else
            {
                /* Plain in-use arena */
                ARENA_INUSE *pArena = (ARENA_INUSE *)ptr;
                DPRINTF( "%p used %08lx\n", pArena, pArena->size & ARENA_SIZE_MASK );
                ptr += sizeof(*pArena) + (pArena->size & ARENA_SIZE_MASK);
                arenaSize += sizeof(ARENA_INUSE);
                usedSize += pArena->size & ARENA_SIZE_MASK;
            }
        }
        DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
                 subheap->size, subheap->commitSize, freeSize, usedSize,
                 arenaSize, (arenaSize * 100) / subheap->size );
        subheap = subheap->next;
    }
}
306
#if 0
/* Debug dump of a PROCESS_HEAP_ENTRY as produced by heap walking.
 * Currently compiled out. */
static void HEAP_DumpEntry( LPPROCESS_HEAP_ENTRY entry )
{
    WORD rem_flags;
    TRACE( "Dumping entry %p\n", entry );
    TRACE( "lpData\t\t: %p\n", entry->lpData );
    TRACE( "cbData\t\t: %08lx\n", entry->cbData);
    TRACE( "cbOverhead\t: %08x\n", entry->cbOverhead);
    TRACE( "iRegionIndex\t: %08x\n", entry->iRegionIndex);
    TRACE( "WFlags\t\t: ");
    if (entry->wFlags & PROCESS_HEAP_REGION)
        TRACE( "PROCESS_HEAP_REGION ");
    if (entry->wFlags & PROCESS_HEAP_UNCOMMITTED_RANGE)
        TRACE( "PROCESS_HEAP_UNCOMMITTED_RANGE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_BUSY)
        TRACE( "PROCESS_HEAP_ENTRY_BUSY ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE)
        TRACE( "PROCESS_HEAP_ENTRY_MOVEABLE ");
    if (entry->wFlags & PROCESS_HEAP_ENTRY_DDESHARE)
        TRACE( "PROCESS_HEAP_ENTRY_DDESHARE ");
    /* Report any flag bits we do not recognize */
    rem_flags = entry->wFlags &
                ~(PROCESS_HEAP_REGION | PROCESS_HEAP_UNCOMMITTED_RANGE |
                  PROCESS_HEAP_ENTRY_BUSY | PROCESS_HEAP_ENTRY_MOVEABLE|
                  PROCESS_HEAP_ENTRY_DDESHARE);
    if (rem_flags)
        TRACE( "Unknown %08x", rem_flags);
    TRACE( "\n");
    if ((entry->wFlags & PROCESS_HEAP_ENTRY_BUSY )
            && (entry->wFlags & PROCESS_HEAP_ENTRY_MOVEABLE))
    {
        /* Treat as block */
        TRACE( "BLOCK->hMem\t\t:%p\n", entry->Block.hMem);
    }
    if (entry->wFlags & PROCESS_HEAP_REGION)
    {
        TRACE( "Region.dwCommittedSize\t:%08lx\n",entry->Region.dwCommittedSize);
        TRACE( "Region.dwUnCommittedSize\t:%08lx\n",entry->Region.dwUnCommittedSize);
        TRACE( "Region.lpFirstBlock\t:%p\n",entry->Region.lpFirstBlock);
        TRACE( "Region.lpLastBlock\t:%p\n",entry->Region.lpLastBlock);
    }
}
#endif
349
350 static PHEAP_USER_DATA HEAP_GetUserData(HEAP *heapPtr, PVOID BaseAddress)
351 {
352 PLIST_ENTRY CurrentEntry;
353 PHEAP_USER_DATA udata;
354
355 CurrentEntry = heapPtr->UserDataHead.Flink;
356 while (CurrentEntry != &heapPtr->UserDataHead)
357 {
358 udata = CONTAINING_RECORD(CurrentEntry, HEAP_USER_DATA, ListEntry);
359 if (udata->BaseAddress == BaseAddress)
360 return udata;
361 CurrentEntry = CurrentEntry->Flink;
362 }
363 return NULL;
364 }
365
366 static PHEAP_USER_DATA HEAP_AllocUserData(HEAP *heapPtr, PVOID BaseAddress)
367 {
368 /* Allocate user data entry */
369 ARENA_INUSE *pInUse;
370 PHEAP_USER_DATA udata = RtlAllocateHeap(heapPtr, 0, sizeof(HEAP_USER_DATA));
371 if (!udata) return NULL;
372 udata->BaseAddress = BaseAddress;
373 InsertTailList(&heapPtr->UserDataHead, &udata->ListEntry);
374 pInUse = (ARENA_INUSE *)BaseAddress - 1;
375 pInUse->has_user_data = 1;
376 return udata;
377 }
378
/***********************************************************************
 * HEAP_GetPtr
 * Validate a heap handle and convert it to a HEAP pointer.
 *
 * RETURNS
 * Pointer to the heap
 * NULL: Failure
 */
static HEAP *HEAP_GetPtr(
    HANDLE heap /* [in] Handle to the heap */
) {
    HEAP *heapPtr = (HEAP *)heap;
    if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
    {
        if (heapPtr)
            /* %.4s prints the 4 magic bytes as characters for diagnosis */
            ERR("Invalid heap %p, magic:%.4s!\n", heap, &heapPtr->magic );
        else
            ERR("Invalid heap %p!\n", heap );
        //KeDumpStackFrames(NULL);
        return NULL;
    }
    /* When heap tracing is on, run a full consistency check on each access */
    if (TRACE_ON(heap) && !HEAP_IsRealArena( heapPtr, 0, NULL, NOISY ))
    {
        HEAP_Dump( heapPtr );
        assert( FALSE );
        return NULL;
    }
    return heapPtr;
}
406
407
/***********************************************************************
 * HEAP_InsertFreeBlock
 *
 * Insert a free block into the free list and set its ARENA_FLAG_FREE bit.
 * 'last' requests tail insertion for its size class, so the block is only
 * handed out after older blocks of the same class.
 */
static __inline void HEAP_InsertFreeBlock( HEAP *heap, ARENA_FREE *pArena, BOOL last )
{
    FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( pArena->size + sizeof(*pArena) );
    if (last)
    {
        /* insert at end of free list, i.e. before the next free list entry */
        pEntry++;
        /* the lists are circular: past the last class, wrap to the first */
        if (pEntry == &heap->freeList[HEAP_NB_FREE_LISTS]) pEntry = heap->freeList;
        list_add_before( &pEntry->arena.entry, &pArena->entry );
    }
    else
    {
        /* insert at head of free list */
        list_add_after( &pEntry->arena.entry, &pArena->entry );
    }
    pArena->size |= ARENA_FLAG_FREE;
}
430
431
432 /***********************************************************************
433 * HEAP_FindSubHeap
434 * Find the sub-heap containing a given address.
435 *
436 * RETURNS
437 * Pointer: Success
438 * NULL: Failure
439 */
440 static SUBHEAP *HEAP_FindSubHeap(
441 const HEAP *heap, /* [in] Heap pointer */
442 LPCVOID ptr /* [in] Address */
443 ) {
444 const SUBHEAP *sub = &heap->subheap;
445 while (sub)
446 {
447 if (((const char *)ptr >= (const char *)sub) &&
448 ((const char *)ptr < (const char *)sub + sub->size)) return (SUBHEAP*)sub;
449 sub = sub->next;
450 }
451 return NULL;
452 }
453
/***********************************************************************
 * HEAP_Commit
 *
 * Make sure the heap storage is committed for a given size in the specified arena.
 * Commits in COMMIT_MASK+1 granules and leaves room after the data for a
 * trailing ARENA_FREE header.
 */
static __inline BOOL HEAP_Commit( SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T data_size )
{
    NTSTATUS Status;
    /* end of data, plus space for a following free arena header */
    void *ptr = (char *)(pArena + 1) + data_size + sizeof(ARENA_FREE);
    SIZE_T size = (char *)ptr - (char *)subheap;
    size = (size + COMMIT_MASK) & ~COMMIT_MASK;
    if (size > subheap->size) size = subheap->size;
    if (size <= subheap->commitSize) return TRUE; /* already committed */
    size -= subheap->commitSize;
    ptr = (char *)subheap + subheap->commitSize;
    if (subheap->heap->commitRoutine != NULL)
    {
        /* a user-supplied commit routine takes precedence (ROS addition) */
        Status = subheap->heap->commitRoutine(subheap->heap, &ptr, &size);
    }
    else
    {
        Status = ZwAllocateVirtualMemory( NtCurrentProcess(), &ptr, 0,
                                          &size, MEM_COMMIT, get_protection_type(subheap->heap->flags) );
    }
    if (!NT_SUCCESS(Status))
    {
        WARN("Could not commit %08lx bytes at %p for heap %p\n",
             size, ptr, subheap->heap );
        return FALSE;
    }
    subheap->commitSize += size;
    return TRUE;
}
487
#if 0
/***********************************************************************
 * HEAP_Decommit
 *
 * If possible, decommit the heap storage from (including) 'ptr'.
 * Currently compiled out.
 */
static inline BOOL HEAP_Decommit( SUBHEAP *subheap, void *ptr )
{
    void *addr;
    SIZE_T decommit_size;
    SIZE_T size = (char *)ptr - (char *)subheap;

    /* round to next block and add one full block */
    size = ((size + COMMIT_MASK) & ~COMMIT_MASK) + COMMIT_MASK + 1;
    if (size >= subheap->commitSize) return TRUE;
    decommit_size = subheap->commitSize - size;
    addr = (char *)subheap + size;

    if (ZwFreeVirtualMemory( NtCurrentProcess(), &addr, &decommit_size, MEM_DECOMMIT ))
    {
        WARN("Could not decommit %08lx bytes at %p for heap %p\n",
             decommit_size, (char *)subheap + size, subheap->heap );
        return FALSE;
    }
    subheap->commitSize -= decommit_size;
    return TRUE;
}
#endif
516
/***********************************************************************
 * HEAP_CreateFreeBlock
 *
 * Create a free block at a specified address. 'size' is the size of the
 * whole block, including the new arena. Coalesces with a following free
 * block and maintains the next block's PREV_FREE flag and back pointer.
 */
static void HEAP_CreateFreeBlock( SUBHEAP *subheap, void *ptr, SIZE_T size )
{
    ARENA_FREE *pFree;
    char *pEnd;
    BOOL last;

    /* Create a free arena */
    mark_block_uninitialized( ptr, sizeof( ARENA_FREE ) );
    pFree = (ARENA_FREE *)ptr;
    pFree->magic = ARENA_FREE_MAGIC;

    /* If debugging, erase the freed block content */

    pEnd = (char *)ptr + size;
    /* never scribble beyond the committed part of the sub-heap */
    if (pEnd > (char *)subheap + subheap->commitSize) pEnd = (char *)subheap + subheap->commitSize;
    if (pEnd > (char *)(pFree + 1)) mark_block_free( pFree + 1, pEnd - (char *)(pFree + 1) );

    /* Check if next block is free also */

    if (((char *)ptr + size < (char *)subheap + subheap->size) &&
        (*(DWORD *)((char *)ptr + size) & ARENA_FLAG_FREE))
    {
        /* Remove the next arena from the free list and absorb it */
        ARENA_FREE *pNext = (ARENA_FREE *)((char *)ptr + size);
        list_remove( &pNext->entry );
        size += (pNext->size & ARENA_SIZE_MASK) + sizeof(*pNext);
        mark_block_free( pNext, sizeof(ARENA_FREE) );
    }

    /* Set the next block PREV_FREE flag and pointer */

    last = ((char *)ptr + size >= (char *)subheap + subheap->size);
    if (!last)
    {
        DWORD *pNext = (DWORD *)((char *)ptr + size);
        *pNext |= ARENA_FLAG_PREV_FREE;
        mark_block_initialized( pNext - 1, sizeof( ARENA_FREE * ) );
        /* the back pointer to this free arena lives immediately before the
         * next arena's header */
        *((ARENA_FREE **)pNext - 1) = pFree;
    }

    /* Last, insert the new block into the free list */

    pFree->size = size - sizeof(*pFree);
    HEAP_InsertFreeBlock( subheap->heap, pFree, last );
}
568
569
570 /***********************************************************************
571 * HEAP_MakeInUseBlockFree
572 *
573 * Turn an in-use block into a free block. Can also decommit the end of
574 * the heap, and possibly even free the sub-heap altogether.
575 */
576 static void HEAP_MakeInUseBlockFree( SUBHEAP *subheap, ARENA_INUSE *pArena )
577 {
578 ARENA_FREE *pFree;
579 SIZE_T size = (pArena->size & ARENA_SIZE_MASK) + sizeof(*pArena);
580 PHEAP_USER_DATA udata;
581
582 /* Find and free user data */
583 if (pArena->has_user_data)
584 {
585 udata = HEAP_GetUserData(subheap->heap, pArena + 1);
586 if (udata)
587 {
588 RemoveEntryList(&udata->ListEntry);
589 RtlFreeHeap(subheap->heap, 0, udata);
590 }
591 }
592
593 /* Check if we can merge with previous block */
594
595 if (pArena->size & ARENA_FLAG_PREV_FREE)
596 {
597 pFree = *((ARENA_FREE **)pArena - 1);
598 size += (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
599 /* Remove it from the free list */
600 list_remove( &pFree->entry );
601 }
602 else pFree = (ARENA_FREE *)pArena;
603
604 /* Create a free block */
605
606 HEAP_CreateFreeBlock( subheap, pFree, size );
607 size = (pFree->size & ARENA_SIZE_MASK) + sizeof(ARENA_FREE);
608 if ((char *)pFree + size < (char *)subheap + subheap->size)
609 return; /* Not the last block, so nothing more to do */
610
611 /* Free the whole sub-heap if it's empty and not the original one */
612
613 if (((char *)pFree == (char *)subheap + subheap->headerSize) &&
614 (subheap != &subheap->heap->subheap))
615 {
616 SIZE_T size = 0;
617 SUBHEAP *pPrev = &subheap->heap->subheap;
618 /* Remove the free block from the list */
619 list_remove( &pFree->entry );
620 /* Remove the subheap from the list */
621 while (pPrev && (pPrev->next != subheap)) pPrev = pPrev->next;
622 if (pPrev) pPrev->next = subheap->next;
623 /* Free the memory */
624 subheap->magic = 0;
625 ZwFreeVirtualMemory( NtCurrentProcess(), (void **)&subheap, &size, MEM_RELEASE );
626 return;
627 }
628
629 /* Decommit the end of the heap */
630 }
631
/***********************************************************************
 * HEAP_ShrinkBlock
 *
 * Shrink an in-use block to 'size' data bytes. The tail becomes a new
 * free block only if at least HEAP_MIN_SHRINK_SIZE bytes would remain;
 * otherwise the slack stays inside the block as unused bytes.
 */
static void HEAP_ShrinkBlock(SUBHEAP *subheap, ARENA_INUSE *pArena, SIZE_T size)
{
    if ((pArena->size & ARENA_SIZE_MASK) >= size + HEAP_MIN_SHRINK_SIZE)
    {
        HEAP_CreateFreeBlock( subheap, (char *)(pArena + 1) + size,
                              (pArena->size & ARENA_SIZE_MASK) - size );
        /* assign size plus previous arena flags */
        pArena->size = size | (pArena->size & ~ARENA_SIZE_MASK);
    }
    else
    {
        /* Turn off PREV_FREE flag in next block: the slack stays part of
         * this in-use block, so the next arena no longer borders free space */
        char *pNext = (char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK);
        if (pNext < (char *)subheap + subheap->size)
            *(DWORD *)pNext &= ~ARENA_FLAG_PREV_FREE;
    }
}
654
/***********************************************************************
 * HEAP_InitSubHeap
 *
 * Initialize the sub-heap header at 'address' (committing the initial
 * memory first if 'address' is NULL). When the sub-heap is the primary
 * one, also set up the main HEAP structure, the free lists, and the
 * heap lock; then commit the initial region and create the first free
 * block covering the rest of the sub-heap.
 */
static BOOL HEAP_InitSubHeap( HEAP *heap, LPVOID address, DWORD flags,
                              SIZE_T commitSize, SIZE_T totalSize,
                              PRTL_HEAP_PARAMETERS Parameters)
{
    SUBHEAP *subheap;
    FREE_LIST_ENTRY *pEntry;
    int i;
    NTSTATUS Status;

    if (!address && ZwAllocateVirtualMemory( NtCurrentProcess(), &address, 0,
                                             &commitSize, MEM_COMMIT, get_protection_type(flags) ))
    {
        WARN("Could not commit %08lx bytes for sub-heap %p\n", commitSize, address );
        return FALSE;
    }

    /* Fill the sub-heap structure */

    subheap = (SUBHEAP *)address;
    subheap->heap = heap;
    subheap->size = totalSize;
    subheap->commitSize = commitSize;
    subheap->magic = SUBHEAP_MAGIC;

    if ( subheap != (SUBHEAP *)heap )
    {
        /* If this is a secondary subheap, insert it into list */

        subheap->headerSize = ROUND_SIZE( sizeof(SUBHEAP) );
        subheap->next = heap->subheap.next;
        heap->subheap.next = subheap;
    }
    else
    {
        /* If this is a primary subheap, initialize main heap */

        subheap->headerSize = ROUND_SIZE( sizeof(HEAP) );
        subheap->next = NULL;
        heap->flags = flags;
        heap->magic = HEAP_MAGIC;
        if (Parameters)
            heap->commitRoutine = Parameters->CommitRoutine;
        else
            heap->commitRoutine = NULL;
        InitializeListHead(&heap->UserDataHead);

        /* Build the free lists */

        list_init( &heap->freeList[0].arena.entry );
        for (i = 0, pEntry = heap->freeList; i < HEAP_NB_FREE_LISTS; i++, pEntry++)
        {
            /* each entry is a dummy zero-size free arena serving as the
             * circular list head for its size class */
            pEntry->arena.size = 0 | ARENA_FLAG_FREE;
            pEntry->arena.magic = ARENA_FREE_MAGIC;
            if (i) list_add_after( &pEntry[-1].arena.entry, &pEntry->arena.entry );
        }

        /* Initialize critical section */

        if (RtlpGetMode() == UserMode)
        {
            if (!processHeap) /* do it by hand to avoid memory allocations */
            {
                heap->lock.CriticalSection.DebugInfo = &process_heap_critsect_debug;
                heap->lock.CriticalSection.LockCount = -1;
                heap->lock.CriticalSection.RecursionCount = 0;
                heap->lock.CriticalSection.OwningThread = 0;
                heap->lock.CriticalSection.LockSemaphore = 0;
                heap->lock.CriticalSection.SpinCount = 0;
                process_heap_critsect_debug.CriticalSection = &heap->lock.CriticalSection;
            }
            else RtlInitializeHeapLock( &heap->lock );
        }
    }

    /* Commit memory */
    if (heap->commitRoutine)
    {
        if (subheap != (SUBHEAP *)heap)
        {
            Status = heap->commitRoutine(heap, &address, &commitSize);
        }
        else
        {
            /* the caller is responsible for committing the first page! */
            Status = STATUS_SUCCESS;
        }
    }
    else
    {
        Status = ZwAllocateVirtualMemory(NtCurrentProcess(),
                                         &address,
                                         0,
                                         &commitSize,
                                         MEM_COMMIT,
                                         get_protection_type(flags));
    }
    if (!NT_SUCCESS(Status))
    {
        DPRINT("Could not commit %08lx bytes for sub-heap %p\n",
               commitSize, address);
        return FALSE;
    }

    /* Create the first free block */

    HEAP_CreateFreeBlock( subheap, (LPBYTE)subheap + subheap->headerSize,
                          subheap->size - subheap->headerSize );

    return TRUE;
}
768
/***********************************************************************
 * HEAP_CreateSubHeap
 *
 * Create a sub-heap of the given size.
 * If heap == NULL, creates a main heap.
 */
static SUBHEAP *HEAP_CreateSubHeap( HEAP *heap, void *base, DWORD flags,
                                    SIZE_T commitSize, SIZE_T totalSize,
                                    IN PRTL_HEAP_PARAMETERS Parameters)
{
    LPVOID address = base;

    /* round-up sizes on a 64K boundary */
    totalSize = (totalSize + 0xffff) & 0xffff0000;
    commitSize = (commitSize + 0xffff) & 0xffff0000;
    if (!commitSize) commitSize = 0x10000;
    totalSize = min( totalSize, 0xffff0000 ); /* don't allow a heap larger than 4Gb */
    if (totalSize < commitSize) totalSize = commitSize;

    if (!address)
    {
        /* allocate the memory block */
        if (ZwAllocateVirtualMemory( NtCurrentProcess(), &address, 0, &totalSize,
                                     MEM_RESERVE | MEM_COMMIT, get_protection_type(flags) ))
        {
            WARN("Could not allocate %08lx bytes\n", totalSize );
            return NULL;
        }
    }

    /* Initialize subheap */

    if (!HEAP_InitSubHeap( heap ? heap : (HEAP *)address,
                           address, flags, commitSize, totalSize, Parameters ))
    {
        /* only release memory we reserved ourselves, never a caller's base */
        SIZE_T size = 0;
        if (!base) ZwFreeVirtualMemory( NtCurrentProcess(), &address, &size, MEM_RELEASE );
        return NULL;
    }

    return (SUBHEAP *)address;
}
811
812
/***********************************************************************
 * HEAP_FindFreeBlock
 *
 * Find a free block at least as large as the requested size, and make sure
 * the requested size is committed. If no suitable block exists, grow the
 * heap with a new sub-heap (when HEAP_GROWABLE is set).
 */
static ARENA_FREE *HEAP_FindFreeBlock( HEAP *heap, SIZE_T size,
                                       SUBHEAP **ppSubHeap )
{
    SUBHEAP *subheap;
    struct list *ptr;
    SIZE_T total_size;
    FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( size + sizeof(ARENA_INUSE) );

    /* Find a suitable free list, and in it find a block large enough */

    ptr = &pEntry->arena.entry;
    /* the free lists are one circular list threaded through the dummy list
     * heads, so starting at this size class also visits all larger ones */
    while ((ptr = list_next( &heap->freeList[0].arena.entry, ptr )))
    {
        ARENA_FREE *pArena = LIST_ENTRY( ptr, ARENA_FREE, entry );
        SIZE_T arena_size = (pArena->size & ARENA_SIZE_MASK) +
                            sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
        if (arena_size >= size)
        {
            subheap = HEAP_FindSubHeap( heap, pArena );
            if (!HEAP_Commit( subheap, (ARENA_INUSE *)pArena, size )) return NULL;
            *ppSubHeap = subheap;
            return pArena;
        }
    }

    /* If no block was found, attempt to grow the heap */

    if (!(heap->flags & HEAP_GROWABLE))
    {
        ERR("Not enough space in heap %p for %08lx bytes\n", heap, size );
        return NULL;
    }
    /* make sure that we have a big enough size *committed* to fit another
     * last free arena in !
     * So just one heap struct, one first free arena which will eventually
     * get used, and a second free arena that might get assigned all remaining
     * free space in HEAP_ShrinkBlock() */
    total_size = size + ROUND_SIZE(sizeof(SUBHEAP)) + sizeof(ARENA_INUSE) + sizeof(ARENA_FREE);
    if (total_size < size) return NULL; /* overflow */

    if (!(subheap = HEAP_CreateSubHeap( heap, NULL, heap->flags, total_size,
                                        max( HEAP_DEF_SIZE, total_size ), NULL )))
        return NULL;

    TRACE("created new sub-heap %p of %08lx bytes for heap %p\n",
          subheap, size, heap );

    *ppSubHeap = subheap;
    /* NOTE(review): assumes sizeof(SUBHEAP) == ROUND_SIZE(sizeof(SUBHEAP))
     * (i.e. (subheap + 1) is exactly the first free block created at
     * headerSize) — holds for current field layout, verify if SUBHEAP
     * changes */
    return (ARENA_FREE *)(subheap + 1);
}
869
870 /***********************************************************************
871 * HEAP_IsValidArenaPtr
872 *
873 * Check that the pointer is inside the range possible for arenas.
874 */
875 static BOOL HEAP_IsValidArenaPtr( const HEAP *heap, const void *ptr )
876 {
877 int i;
878 const SUBHEAP *subheap = HEAP_FindSubHeap( heap, ptr );
879 if (!subheap) return FALSE;
880 if ((const char *)ptr >= (const char *)subheap + subheap->headerSize) return TRUE;
881 if (subheap != &heap->subheap) return FALSE;
882 for (i = 0; i < HEAP_NB_FREE_LISTS; i++)
883 if (ptr == (const void *)&heap->freeList[i].arena) return TRUE;
884 return FALSE;
885 }
886
887
/***********************************************************************
 * HEAP_ValidateFreeArena
 *
 * Consistency-check a free arena: alignment, magic, flags, size bounds,
 * free-list links, and the following block's PREV_FREE flag/back pointer.
 * Logs the failure and returns FALSE on the first inconsistency.
 */
static BOOL HEAP_ValidateFreeArena( SUBHEAP *subheap, ARENA_FREE *pArena )
{
    ARENA_FREE *prev, *next;
    char *heapEnd = (char *)subheap + subheap->size;

    /* Check for unaligned pointers */
    if ( (ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET )
    {
        ERR("Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
        return FALSE;
    }

    /* Check magic number */
    if (pArena->magic != ARENA_FREE_MAGIC)
    {
        ERR("Heap %p: invalid free arena magic for %p\n", subheap->heap, pArena );
        return FALSE;
    }
    /* Check size flags: FREE must be set, and two adjacent free blocks
     * should have been coalesced, so PREV_FREE must not be */
    if (!(pArena->size & ARENA_FLAG_FREE) ||
        (pArena->size & ARENA_FLAG_PREV_FREE))
    {
        ERR("Heap %p: bad flags %08lx for free arena %p\n",
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check arena size */
    if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
    {
        ERR("Heap %p: bad size %08lx for free arena %p\n",
            subheap->heap, pArena->size & ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check that next pointer is valid */
    next = LIST_ENTRY( pArena->entry.next, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, next ))
    {
        ERR("Heap %p: bad next ptr %p for arena %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that next arena is free */
    if (!(next->size & ARENA_FLAG_FREE) || (next->magic != ARENA_FREE_MAGIC))
    {
        ERR("Heap %p: next arena %p invalid for %p\n",
            subheap->heap, next, pArena );
        return FALSE;
    }
    /* Check that prev pointer is valid */
    prev = LIST_ENTRY( pArena->entry.prev, ARENA_FREE, entry );
    if (!HEAP_IsValidArenaPtr( subheap->heap, prev ))
    {
        ERR("Heap %p: bad prev ptr %p for arena %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that prev arena is free */
    if (!(prev->size & ARENA_FLAG_FREE) || (prev->magic != ARENA_FREE_MAGIC))
    {
        /* this often means that the prev arena got overwritten
         * by a memory write before that prev arena */
        ERR("Heap %p: prev arena %p invalid for %p\n",
            subheap->heap, prev, pArena );
        return FALSE;
    }
    /* Check that next block has PREV_FREE flag */
    if ((char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd)
    {
        if (!(*(DWORD *)((char *)(pArena + 1) +
            (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
        {
            ERR("Heap %p: free arena %p next block has no PREV_FREE flag\n",
                subheap->heap, pArena );
            return FALSE;
        }
        /* Check next block back pointer (stored just before its header) */
        if (*((ARENA_FREE **)((char *)(pArena + 1) +
            (pArena->size & ARENA_SIZE_MASK)) - 1) != pArena)
        {
            ERR("Heap %p: arena %p has wrong back ptr %p\n",
                subheap->heap, pArena,
                *((ARENA_FREE **)((char *)(pArena+1) + (pArena->size & ARENA_SIZE_MASK)) - 1));
            return FALSE;
        }
    }
    return TRUE;
}
978
979 /***********************************************************************
980 * HEAP_ValidateInUseArena
981 */
static BOOL HEAP_ValidateInUseArena( const SUBHEAP *subheap, const ARENA_INUSE *pArena, BOOL quiet )
{
    /* First byte past the subheap; no arena header or data may extend
     * beyond this point. */
    const char *heapEnd = (const char *)subheap + subheap->size;

    /* Check for unaligned pointers */
    if ( (ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET )
    {
        /* NOISY mode always reports; QUIET mode reports only when heap
         * warnings are compiled in/enabled. */
        if ( quiet == NOISY )
        {
            ERR( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        else if ( WARN_ON(heap) )
        {
            WARN( "Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
            if ( TRACE_ON(heap) )
                HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }

    /* Check magic number */
    if (pArena->magic != ARENA_INUSE_MAGIC)
    {
        if (quiet == NOISY) {
            ERR("Heap %p: invalid in-use arena magic for %p\n", subheap->heap, pArena );
            if (TRACE_ON(heap))
                HEAP_Dump( subheap->heap );
        } else if (WARN_ON(heap)) {
            WARN("Heap %p: invalid in-use arena magic for %p\n", subheap->heap, pArena );
            if (TRACE_ON(heap))
                HEAP_Dump( subheap->heap );
        }
        return FALSE;
    }
    /* Check size flags: an in-use arena must not carry the FREE flag */
    if (pArena->size & ARENA_FLAG_FREE)
    {
        ERR("Heap %p: bad flags %08lx for in-use arena %p\n",
            subheap->heap, pArena->size & ~ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check arena size: the data area must lie entirely inside the subheap */
    if ((const char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) > heapEnd)
    {
        ERR("Heap %p: bad size %08lx for in-use arena %p\n",
            subheap->heap, pArena->size & ARENA_SIZE_MASK, pArena );
        return FALSE;
    }
    /* Check next arena PREV_FREE flag: the block following an in-use arena
     * must not claim that its predecessor is free */
    if (((const char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK) < heapEnd) &&
        (*(const DWORD *)((const char *)(pArena + 1) + (pArena->size & ARENA_SIZE_MASK)) & ARENA_FLAG_PREV_FREE))
    {
        ERR("Heap %p: in-use arena %p next block has PREV_FREE flag\n",
            subheap->heap, pArena );
        return FALSE;
    }
    /* Check prev free arena: when PREV_FREE is set, the back pointer stored
     * immediately before this arena must reference a valid free arena that
     * ends exactly where this arena begins */
    if (pArena->size & ARENA_FLAG_PREV_FREE)
    {
        const ARENA_FREE *pPrev = *((const ARENA_FREE * const*)pArena - 1);
        /* Check prev pointer */
        if (!HEAP_IsValidArenaPtr( subheap->heap, pPrev ))
        {
            ERR("Heap %p: bad back ptr %p for arena %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
        /* Check that prev arena is free */
        if (!(pPrev->size & ARENA_FLAG_FREE) ||
            (pPrev->magic != ARENA_FREE_MAGIC))
        {
            ERR("Heap %p: prev arena %p invalid for in-use %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
        /* Check that prev arena is really the previous block */
        if ((const char *)(pPrev + 1) + (pPrev->size & ARENA_SIZE_MASK) != (const char *)pArena)
        {
            ERR("Heap %p: prev arena %p is not prev for in-use %p\n",
                subheap->heap, pPrev, pArena );
            return FALSE;
        }
    }
    return TRUE;
}
1069
1070 /***********************************************************************
1071 * HEAP_IsRealArena [Internal]
1072 * Validates a block is a valid arena.
1073 *
1074 * RETURNS
1075 * TRUE: Success
1076 * FALSE: Failure
1077 */
static BOOL HEAP_IsRealArena( HEAP *heapPtr,   /* [in] ptr to the heap */
              DWORD flags,   /* [in] Bit flags that control access during operation */
              LPCVOID block, /* [in] Optional pointer to memory block to validate */
              BOOL quiet )   /* [in] Flag - if true, HEAP_ValidateInUseArena
                              *             does not complain    */
{
    SUBHEAP *subheap;
    BOOL ret = TRUE;

    if (!heapPtr || (heapPtr->magic != HEAP_MAGIC))
    {
        ERR("Invalid heap %p!\n", heapPtr );
        return FALSE;
    }

    /* Honor only the caller's HEAP_NO_SERIALIZE bit; everything else comes
     * from the flags the heap was created with. */
    flags &= HEAP_NO_SERIALIZE;
    flags |= heapPtr->flags;
    /* calling HeapLock may result in infinite recursion, so do the critsect directly */
    if (!(flags & HEAP_NO_SERIALIZE))
        RtlEnterHeapLock( &heapPtr->lock );

    if (block)
    {
        /* Only check this single memory block */

        /* The block must belong to some subheap and sit past that subheap's
         * header with room for its own ARENA_INUSE header in front of it. */
        if (!(subheap = HEAP_FindSubHeap( heapPtr, block )) ||
            ((const char *)block < (char *)subheap + subheap->headerSize
                                   + sizeof(ARENA_INUSE)))
        {
            if (quiet == NOISY)
                ERR("Heap %p: block %p is not inside heap\n", heapPtr, block );
            else if (WARN_ON(heap))
                WARN("Heap %p: block %p is not inside heap\n", heapPtr, block );
            ret = FALSE;
        } else
            ret = HEAP_ValidateInUseArena( subheap, (const ARENA_INUSE *)block - 1, quiet );

        if (!(flags & HEAP_NO_SERIALIZE))
            RtlLeaveHeapLock( &heapPtr->lock );
        return ret;
    }

    /* No block given: walk every arena of every subheap, stopping at the
     * first invalid one. */
    subheap = &heapPtr->subheap;
    while (subheap && ret)
    {
        char *ptr = (char *)subheap + subheap->headerSize;
        while (ptr < (char *)subheap + subheap->size)
        {
            if (*(DWORD *)ptr & ARENA_FLAG_FREE)
            {
                if (!HEAP_ValidateFreeArena( subheap, (ARENA_FREE *)ptr )) {
                    ret = FALSE;
                    break;
                }
                /* skip the free arena header plus its data area */
                ptr += sizeof(ARENA_FREE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
            }
            else
            {
                if (!HEAP_ValidateInUseArena( subheap, (ARENA_INUSE *)ptr, NOISY )) {
                    ret = FALSE;
                    break;
                }
                /* skip the in-use arena header plus its data area */
                ptr += sizeof(ARENA_INUSE) + (*(DWORD *)ptr & ARENA_SIZE_MASK);
            }
        }
        subheap = subheap->next;
    }

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
    return ret;
}
1149
1150
1151 /***********************************************************************
1152 * HeapCreate (KERNEL32.336)
1153 * RETURNS
1154 * Handle of heap: Success
1155 * NULL: Failure
1156 *
1157 * @implemented
1158 */
HANDLE NTAPI
RtlCreateHeap(ULONG flags,
              PVOID addr,
              SIZE_T totalSize,
              SIZE_T commitSize,
              PVOID Lock,
              PRTL_HEAP_PARAMETERS Parameters)
{
    SUBHEAP *subheap;

    /* Allocate the heap block */

    if (!totalSize)
    {
        /* no size requested: fall back to the default size and let the
         * heap grow on demand */
        totalSize = HEAP_DEF_SIZE;
        flags |= HEAP_GROWABLE;
    }
    if (!(subheap = HEAP_CreateSubHeap( NULL, addr, flags, commitSize, totalSize, Parameters ))) return 0;

    /* NOTE(review): the Lock parameter is never referenced by this
     * implementation -- confirm no caller depends on supplying one. */
    if (RtlpGetMode() == UserMode)
    {
        /* link it into the per-process heap list */
        if (processHeap)
        {
            HEAP *heapPtr = subheap->heap;
            RtlEnterHeapLock( &processHeap->lock );
            list_add_head( &processHeap->entry, &heapPtr->entry );
            RtlLeaveHeapLock( &processHeap->lock );
        }
        else
        {
            processHeap = subheap->heap; /* assume the first heap we create is the process main heap */
            list_init( &processHeap->entry );
            assert( (ULONG_PTR)processHeap->freeList % ALIGNMENT == ARENA_OFFSET );
        }
    }

    /* the subheap pointer doubles as the public heap handle */
    return (HANDLE)subheap;
}
1198
1199 /***********************************************************************
1200 * HeapDestroy (KERNEL32.337)
1201 * RETURNS
1202 * TRUE: Success
1203 * FALSE: Failure
1204 *
1205 * @implemented
1206 *
1207 * RETURNS
1208 * Success: A NULL HANDLE, if heap is NULL or it was destroyed
1209 * Failure: The Heap handle, if heap is the process heap.
1210 */
HANDLE NTAPI
RtlDestroyHeap(HANDLE heap) /* [in] Handle of heap */
{
    HEAP *heapPtr = HEAP_GetPtr( heap );
    SUBHEAP *subheap;

    DPRINT("%p\n", heap );
    if (!heapPtr)
        return heap;

    if (RtlpGetMode() == UserMode)
    {
        if (heap == NtCurrentPeb()->ProcessHeap)
            return heap; /* cannot delete the main process heap */

        /* remove it from the per-process list */
        RtlEnterHeapLock( &processHeap->lock );
        list_remove( &heapPtr->entry );
        RtlLeaveHeapLock( &processHeap->lock );
    }

    RtlDeleteHeapLock( &heapPtr->lock );
    /* Release every subheap.  The next pointer is saved before the free
     * because the link lives inside the region being released; size 0 with
     * MEM_RELEASE frees the entire reservation at addr. */
    subheap = &heapPtr->subheap;
    while (subheap)
    {
        SUBHEAP *next = subheap->next;
        SIZE_T size = 0;
        void *addr = subheap;
        ZwFreeVirtualMemory( NtCurrentProcess(), &addr, &size, MEM_RELEASE );
        subheap = next;
    }
    return (HANDLE)NULL;
}
1244
1245
1246 /***********************************************************************
1247 * HeapAlloc (KERNEL32.334)
1248 * RETURNS
1249 * Pointer to allocated memory block
1250 * NULL: Failure
1251 * 0x7d030f60--invalid flags in RtlHeapAllocate
1252 * @implemented
1253 */
PVOID NTAPI
RtlAllocateHeap(HANDLE heap,   /* [in] Handle of private heap block */
                ULONG flags,   /* [in] Heap allocation control flags */
                SIZE_T size)   /* [in] Number of bytes to allocate */
{
    ARENA_FREE *pArena;
    ARENA_INUSE *pInUse;
    SUBHEAP *subheap;
    HEAP *heapPtr = HEAP_GetPtr( heap );
    SIZE_T rounded_size;

    /* Validate the parameters */

    if (!heapPtr)
    {
        if (flags & HEAP_GENERATE_EXCEPTIONS)
            RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }
    //flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
    flags |= heapPtr->flags;
    /* Round the request up to the allocation granularity; a result smaller
     * than the input means the addition wrapped around. */
    rounded_size = ROUND_SIZE(size);
    if (rounded_size < size)  /* overflow */
    {
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }

    if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;

    if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
    /* Locate a suitable free block */
    if (!(pArena = HEAP_FindFreeBlock( heapPtr, rounded_size, &subheap )))
    {
        TRACE("(%p,%08lx,%08lx): returning NULL\n",
              heap, flags, size );
        if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
        if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
        return NULL;
    }

    /* Remove the arena from the free list */

    list_remove( &pArena->entry );

    /* Build the in-use arena */

    pInUse = (ARENA_INUSE *)pArena;

    /* in-use arena is smaller than free arena,
     * so we have to add the difference to the size */
    pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE) + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
    pInUse->magic = ARENA_INUSE_MAGIC;
    pInUse->has_user_data = 0;

    /* Shrink the block */

    HEAP_ShrinkBlock( subheap, pInUse, rounded_size );
    /* record how many tail bytes of the arena the caller did not request */
    pInUse->unused_bytes = (pInUse->size & ARENA_SIZE_MASK) - size;

    if (flags & HEAP_ZERO_MEMORY)
    {
        /* zero only the requested bytes; the rounding slack stays marked
         * uninitialized */
        clear_block( pInUse + 1, size );
        mark_block_uninitialized( (char *)(pInUse + 1) + size, pInUse->unused_bytes );
    }
    else
        mark_block_uninitialized( pInUse + 1, pInUse->size & ARENA_SIZE_MASK );

    if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );

    TRACE("(%p,%08lx,%08lx): returning %p\n", heap, flags, size, pInUse + 1 );
    return (LPVOID)(pInUse + 1);
}
1327
1328
1329 /***********************************************************************
1330 * HeapFree (KERNEL32.338)
1331 * RETURNS
1332 * TRUE: Success
1333 * FALSE: Failure
1334 *
1335 * @implemented
1336 */
1337 BOOLEAN NTAPI RtlFreeHeap(
1338 HANDLE heap, /* [in] Handle of heap */
1339 ULONG flags, /* [in] Heap freeing flags */
1340 PVOID ptr /* [in] Address of memory to free */
1341 )
1342 {
1343 ARENA_INUSE *pInUse;
1344 SUBHEAP *subheap;
1345 HEAP *heapPtr;
1346
1347 /* Validate the parameters */
1348
1349 if (!ptr) return TRUE; /* freeing a NULL ptr isn't an error in Win2k */
1350
1351 heapPtr = HEAP_GetPtr( heap );
1352 if (!heapPtr)
1353 {
1354 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1355 return FALSE;
1356 }
1357
1358 flags &= HEAP_NO_SERIALIZE;
1359 flags |= heapPtr->flags;
1360 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
1361 if (!HEAP_IsRealArena( heapPtr, HEAP_NO_SERIALIZE, ptr, QUIET ))
1362 {
1363 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1364 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
1365 TRACE("(%p,%08lx,%p): returning FALSE\n", heap, flags, ptr );
1366 return FALSE;
1367 }
1368
1369 /* Some sanity checks */
1370 pInUse = (ARENA_INUSE *)ptr - 1;
1371 subheap = HEAP_FindSubHeap( heapPtr, pInUse );
1372 if ((char *)pInUse < (char *)subheap + subheap->headerSize) goto error;
1373 if (!HEAP_ValidateInUseArena( subheap, pInUse, QUIET )) goto error;
1374
1375 /* Turn the block into a free block */
1376
1377 HEAP_MakeInUseBlockFree( subheap, pInUse );
1378
1379 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1380
1381 TRACE("(%p,%08lx,%p): returning TRUE\n", heap, flags, ptr );
1382 return TRUE;
1383
1384 error:
1385 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1386 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
1387 TRACE("(%p,%08x,%p): returning FALSE\n", heap, flags, ptr );
1388 return FALSE;
1389 }
1390
1391
1392 /***********************************************************************
1393 * RtlReAllocateHeap
1394 * PARAMS
1395 * Heap [in] Handle of heap block
1396 * Flags [in] Heap reallocation flags
1397 * Ptr, [in] Address of memory to reallocate
1398 * Size [in] Number of bytes to reallocate
1399 *
1400 * RETURNS
1401 * Pointer to reallocated memory block
1402 * NULL: Failure
1403 * 0x7d030f60--invalid flags in RtlHeapAllocate
1404 * @implemented
1405 */
1406 PVOID NTAPI RtlReAllocateHeap(
1407 HANDLE heap,
1408 ULONG flags,
1409 PVOID ptr,
1410 SIZE_T size
1411 )
1412 {
1413 ARENA_INUSE *pArena;
1414 HEAP *heapPtr;
1415 SUBHEAP *subheap;
1416 SIZE_T oldBlockSize, oldActualSize, rounded_size;
1417
1418 if (!ptr) return NULL;
1419 if (!(heapPtr = HEAP_GetPtr( heap )))
1420 {
1421 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1422 return NULL;
1423 }
1424
1425 /* Validate the parameters */
1426
1427 //Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
1428 // HEAP_REALLOC_IN_PLACE_ONLY;
1429 flags |= heapPtr->flags;
1430 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
1431
1432 rounded_size = ROUND_SIZE(size);
1433 if (rounded_size < size) goto oom; /* overflow */
1434 if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;
1435
1436 pArena = (ARENA_INUSE *)ptr - 1;
1437 if(!(subheap = HEAP_FindSubHeap( heapPtr, pArena ))) goto done;
1438 if ((char *)pArena < (char *)subheap + subheap->headerSize) goto error;
1439 if (!HEAP_ValidateInUseArena( subheap, pArena, QUIET )) goto error;
1440
1441 oldBlockSize = (pArena->size & ARENA_SIZE_MASK);
1442 oldActualSize = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
1443
1444 if (rounded_size > oldBlockSize)
1445 {
1446 char *pNext = (char *)(pArena + 1) + oldBlockSize;
1447 if ((pNext < (char *)subheap + subheap->size) &&
1448 (*(DWORD *)pNext & ARENA_FLAG_FREE) &&
1449 (oldBlockSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= rounded_size))
1450 {
1451 ARENA_FREE *pFree = (ARENA_FREE *)pNext;
1452 list_remove( &pFree->entry );
1453 pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
1454
1455 if (!HEAP_Commit( subheap, pArena, rounded_size )) goto oom;
1456
1457 HEAP_ShrinkBlock( subheap, pArena, rounded_size );
1458 mark_block_initialized( pArena + 1, oldActualSize );
1459 }
1460 else /* Do it the hard way */
1461 {
1462 ARENA_FREE *pNew;
1463 ARENA_INUSE *pInUse;
1464 SUBHEAP *newsubheap;
1465 if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
1466 !(pNew = HEAP_FindFreeBlock( heapPtr, rounded_size, &newsubheap )))
1467 goto oom;
1468
1469 /* Build the in-use arena */
1470
1471 list_remove( &pNew->entry );
1472 pInUse = (ARENA_INUSE *)pNew;
1473 pInUse->size = (pInUse->size & ~ARENA_FLAG_FREE)
1474 + sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
1475 pInUse->magic = ARENA_INUSE_MAGIC;
1476 HEAP_ShrinkBlock( newsubheap, pInUse, rounded_size );
1477
1478 mark_block_initialized( pInUse + 1, oldActualSize );
1479 memcpy( pInUse + 1, pArena + 1, oldActualSize );
1480
1481 /* Free the previous block */
1482
1483 HEAP_MakeInUseBlockFree( subheap, pArena );
1484 subheap = newsubheap;
1485 pArena = pInUse;
1486 }
1487 }
1488 else
1489 {
1490 HEAP_ShrinkBlock( subheap, pArena, rounded_size ); /* Shrink the block */
1491 mark_block_initialized( pArena + 1, size );
1492 }
1493
1494 pArena->unused_bytes = (pArena->size & ARENA_SIZE_MASK) - size;
1495
1496 /* Clear the extra bytes if needed */
1497
1498 if (size > oldActualSize)
1499 {
1500 if (flags & HEAP_ZERO_MEMORY)
1501 {
1502 clear_block( (char *)(pArena + 1) + oldActualSize, size - oldActualSize );
1503 mark_block_uninitialized( (char *)(pArena + 1) + size,
1504 (pArena->size & ARENA_SIZE_MASK) - oldActualSize );
1505 }
1506 else
1507 mark_block_uninitialized( (char *)(pArena + 1) + oldActualSize,
1508 (pArena->size & ARENA_SIZE_MASK) - oldActualSize );
1509 }
1510
1511 /* Return the new arena */
1512 done:
1513 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1514 TRACE("(%p,%08lx,%p,%08lx): returning %p\n", heap, flags, ptr, size, pArena + 1 );
1515 return (LPVOID)(pArena + 1);
1516
1517 oom:
1518 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1519 if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
1520 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
1521 TRACE("(%p,%08x,%p,%08lx): returning oom\n", heap, flags, ptr, size );
1522 return NULL;
1523
1524 error:
1525 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1526 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
1527 TRACE("(%p,%08x,%p,%08lx): returning error\n", heap, flags, ptr, size );
1528 return NULL;
1529 }
1530
1531
1532 /***********************************************************************
1533 * RtlCompactHeap
1534 *
1535 * @unimplemented
1536 */
ULONG NTAPI
RtlCompactHeap(HANDLE Heap,
               ULONG Flags)
{
    /* Stub: heap compaction is not implemented; 0 is returned unconditionally */
    UNIMPLEMENTED;
    return 0;
}
1544
1545
1546 /***********************************************************************
1547 * RtlLockHeap
1548 * Attempts to acquire the critical section object for a specified heap.
1549 *
1550 * PARAMS
1551 * Heap [in] Handle of heap to lock for exclusive access
1552 *
1553 * RETURNS
1554 * TRUE: Success
1555 * FALSE: Failure
1556 *
1557 * @implemented
1558 */
1559 BOOLEAN NTAPI
1560 RtlLockHeap(IN HANDLE Heap)
1561 {
1562 HEAP *heapPtr = HEAP_GetPtr( Heap );
1563 if (!heapPtr)
1564 return FALSE;
1565 RtlEnterHeapLock( &heapPtr->lock );
1566 return TRUE;
1567 }
1568
1569
1570 /***********************************************************************
1571 * RtlUnlockHeap
1572 * Releases ownership of the critical section object.
1573 *
1574 * PARAMS
1575 * Heap [in] Handle to the heap to unlock
1576 *
1577 * RETURNS
1578 * TRUE: Success
1579 * FALSE: Failure
1580 *
1581 * @implemented
1582 */
1583 BOOLEAN NTAPI
1584 RtlUnlockHeap(HANDLE Heap)
1585 {
1586 HEAP *heapPtr = HEAP_GetPtr( Heap );
1587 if (!heapPtr)
1588 return FALSE;
1589 RtlLeaveHeapLock( &heapPtr->lock );
1590 return TRUE;
1591 }
1592
1593
1594 /***********************************************************************
1595 * RtlSizeHeap
1596 * PARAMS
1597 * Heap [in] Handle of heap
1598 * Flags [in] Heap size control flags
1599 * Ptr [in] Address of memory to return size for
1600 *
1601 * RETURNS
1602 * Size in bytes of allocated memory
1603 * 0xffffffff: Failure
1604 *
1605 * @implemented
1606 */
1607 SIZE_T NTAPI
1608 RtlSizeHeap(
1609 HANDLE heap,
1610 ULONG flags,
1611 PVOID ptr
1612 )
1613 {
1614 SIZE_T ret;
1615 HEAP *heapPtr = HEAP_GetPtr( heap );
1616
1617 if (!heapPtr)
1618 {
1619 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1620 return MAXULONG;
1621 }
1622 flags &= HEAP_NO_SERIALIZE;
1623 flags |= heapPtr->flags;
1624 if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
1625 if (!HEAP_IsRealArena( heapPtr, HEAP_NO_SERIALIZE, ptr, QUIET ))
1626 {
1627 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
1628 ret = MAXULONG;
1629 }
1630 else
1631 {
1632 const ARENA_INUSE *pArena = (const ARENA_INUSE *)ptr - 1;
1633 ret = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
1634 }
1635 if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
1636
1637 TRACE("(%p,%08lx,%p): returning %08lx\n", heap, flags, ptr, ret );
1638 return ret;
1639 }
1640
1641
1642 /***********************************************************************
1643 * RtlValidateHeap
1644 * Validates a specified heap.
1645 *
1646 * PARAMS
1647 * Heap [in] Handle to the heap
1648 * Flags [in] Bit flags that control access during operation
1649 * Block [in] Optional pointer to memory block to validate
1650 *
1651 * NOTES
1652 * Flags is ignored.
1653 *
1654 * RETURNS
1655 * TRUE: Success
1656 * FALSE: Failure
1657 *
1658 * @implemented
1659 */
1660 BOOLEAN NTAPI RtlValidateHeap(
1661 HANDLE Heap,
1662 ULONG Flags,
1663 PVOID Block
1664 )
1665 {
1666 HEAP *heapPtr = HEAP_GetPtr( Heap );
1667 if (!heapPtr)
1668 return FALSE;
1669 return HEAP_IsRealArena( heapPtr, Flags, Block, QUIET );
1670 }
1671
1672 VOID
1673 RtlInitializeHeapManager(VOID)
1674 {
1675 PPEB Peb;
1676
1677 Peb = NtCurrentPeb();
1678
1679 Peb->NumberOfHeaps = 0;
1680 Peb->MaximumNumberOfHeaps = -1; /* no limit */
1681 Peb->ProcessHeaps = NULL;
1682
1683 //RtlInitializeHeapLock(&RtlpProcessHeapsListLock);
1684 }
1685
1686
1687 /*
1688 * @implemented
1689 */
1690 NTSTATUS NTAPI
1691 RtlEnumProcessHeaps(PHEAP_ENUMERATION_ROUTINE HeapEnumerationRoutine,
1692 PVOID lParam)
1693 {
1694 NTSTATUS Status = STATUS_SUCCESS;
1695
1696 struct list *ptr=NULL;
1697 RtlEnterHeapLock(&processHeap->lock);
1698 Status=HeapEnumerationRoutine(processHeap,lParam);
1699
1700 LIST_FOR_EACH( ptr, &processHeap->entry )
1701 {
1702 if (!NT_SUCCESS(Status)) break;
1703 Status = HeapEnumerationRoutine(ptr,lParam);
1704 }
1705
1706 RtlLeaveHeapLock(&processHeap->lock);
1707
1708 return Status;
1709 }
1710
1711
1712 /*
1713 * @implemented
1714 */
1715 ULONG NTAPI
1716 RtlGetProcessHeaps(ULONG count,
1717 HANDLE *heaps )
1718 {
1719 ULONG total = 1; /* main heap */
1720 struct list *ptr;
1721 ULONG i=0;
1722 RtlEnterHeapLock( &processHeap->lock );
1723 LIST_FOR_EACH( ptr, &processHeap->entry ) total++;
1724 //if (total <= count)
1725 {
1726 *(heaps++) = processHeap;
1727 i++;
1728 LIST_FOR_EACH( ptr, &processHeap->entry )
1729 {
1730 if (i >= count) break;
1731 i++;
1732 *(heaps++) = LIST_ENTRY( ptr, HEAP, entry );
1733 }
1734 }
1735 RtlLeaveHeapLock( &processHeap->lock );
1736 return i;
1737 }
1738
1739
1740 /*
1741 * @implemented
1742 */
1743 BOOLEAN NTAPI
1744 RtlValidateProcessHeaps(VOID)
1745 {
1746 BOOLEAN Result = TRUE;
1747 HEAP ** pptr;
1748
1749 RtlEnterHeapLock( &processHeap->lock );
1750
1751 for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr++)
1752 {
1753 if (!RtlValidateHeap(*pptr, 0, NULL))
1754 {
1755 Result = FALSE;
1756 break;
1757 }
1758 }
1759
1760 RtlLeaveHeapLock( &processHeap->lock );
1761 return Result;
1762 }
1763
1764
1765 /*
1766 * @unimplemented
1767 */
BOOLEAN NTAPI
RtlZeroHeap(
    IN PVOID HeapHandle,
    IN ULONG Flags
    )
{
    /* Stub: zeroing of heap memory is not implemented; always fails */
    UNIMPLEMENTED;
    return FALSE;
}
1777
1778 /*
1779 * @implemented
1780 */
1781 BOOLEAN
1782 NTAPI
1783 RtlSetUserValueHeap(IN PVOID HeapHandle,
1784 IN ULONG Flags,
1785 IN PVOID BaseAddress,
1786 IN PVOID UserValue)
1787 {
1788 HEAP *heapPtr;
1789 PHEAP_USER_DATA udata;
1790
1791 heapPtr = HEAP_GetPtr(HeapHandle);
1792 if (!heapPtr)
1793 {
1794 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1795 return FALSE;
1796 }
1797 udata = HEAP_GetUserData(heapPtr, BaseAddress);
1798 if (!udata)
1799 {
1800 udata = HEAP_AllocUserData(heapPtr, BaseAddress);
1801 if (!udata) return FALSE;
1802 }
1803 udata->UserValue = UserValue;
1804 return TRUE;
1805 }
1806
1807 /*
1808 * @implemented
1809 */
1810 BOOLEAN
1811 NTAPI
1812 RtlSetUserFlagsHeap(IN PVOID HeapHandle,
1813 IN ULONG Flags,
1814 IN PVOID BaseAddress,
1815 IN ULONG UserFlagsReset,
1816 IN ULONG UserFlagsSet)
1817 {
1818 HEAP *heapPtr;
1819 PHEAP_USER_DATA udata;
1820
1821 heapPtr = HEAP_GetPtr(HeapHandle);
1822 if (!heapPtr)
1823 {
1824 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1825 return FALSE;
1826 }
1827 udata = HEAP_GetUserData(heapPtr, BaseAddress);
1828 if (!udata)
1829 {
1830 udata = HEAP_AllocUserData(heapPtr, BaseAddress);
1831 if (!udata) return FALSE;
1832 }
1833 udata->UserFlags = UserFlagsSet & HEAP_SETTABLE_USER_FLAGS;
1834 return TRUE;
1835 }
1836
1837 /*
1838 * @implemented
1839 */
1840 BOOLEAN
1841 NTAPI
1842 RtlGetUserInfoHeap(IN PVOID HeapHandle,
1843 IN ULONG Flags,
1844 IN PVOID BaseAddress,
1845 OUT PVOID *UserValue,
1846 OUT PULONG UserFlags)
1847 {
1848 HEAP *heapPtr;
1849 PHEAP_USER_DATA udata;
1850
1851 heapPtr = HEAP_GetPtr(HeapHandle);
1852 if (!heapPtr)
1853 {
1854 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1855 return FALSE;
1856 }
1857 udata = HEAP_GetUserData(heapPtr, BaseAddress);
1858 if (!udata)
1859 {
1860 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
1861 return FALSE;
1862 }
1863 if (UserValue) *UserValue = udata->UserValue;
1864 if (UserFlags) *UserFlags = udata->UserFlags;
1865 return TRUE;
1866 }
1867
1868 /*
1869 * @unimplemented
1870 */
NTSTATUS
NTAPI
RtlUsageHeap(IN HANDLE Heap,
             IN ULONG Flags,
             OUT PRTL_HEAP_USAGE Usage)
{
    /* Stub: heap usage reporting is not implemented; Usage is not written */
    /* TODO */
    UNIMPLEMENTED;
    return STATUS_NOT_IMPLEMENTED;
}
1881
PWSTR
NTAPI
RtlQueryTagHeap(IN PVOID HeapHandle,
                IN ULONG Flags,
                IN USHORT TagIndex,
                IN BOOLEAN ResetCounters,
                OUT PRTL_HEAP_TAG_INFO HeapTagInfo)
{
    /* Stub: heap tagging is not implemented; NULL means no tag name */
    /* TODO */
    UNIMPLEMENTED;
    return NULL;
}
1894
ULONG
NTAPI
RtlExtendHeap(IN HANDLE Heap,
              IN ULONG Flags,
              IN PVOID P,
              IN SIZE_T Size)
{
    /* Stub: extending a heap with caller-supplied memory is not implemented */
    /* TODO */
    UNIMPLEMENTED;
    return 0;
}
1906
ULONG
NTAPI
RtlCreateTagHeap(IN HANDLE HeapHandle,
                 IN ULONG Flags,
                 IN PWSTR TagName,
                 IN PWSTR TagSubName)
{
    /* Stub: heap tag creation is not implemented; 0 means no tag assigned */
    /* TODO */
    UNIMPLEMENTED;
    return 0;
}
1918
NTSTATUS
NTAPI
RtlWalkHeap(IN HANDLE HeapHandle,
            IN PVOID HeapEntry)
{
    /* Stub: heap-entry walking is not implemented */
    UNIMPLEMENTED;
    return STATUS_NOT_IMPLEMENTED;
}
1927
PVOID
NTAPI
RtlProtectHeap(IN PVOID HeapHandle,
               IN BOOLEAN ReadOnly)
{
    /* Stub: changing heap page protection is not implemented */
    UNIMPLEMENTED;
    return NULL;
}
1936
NTSTATUS
NTAPI
RtlSetHeapInformation(IN HANDLE HeapHandle OPTIONAL,
                      IN HEAP_INFORMATION_CLASS HeapInformationClass,
                      IN PVOID HeapInformation,
                      IN SIZE_T HeapInformationLength)
{
    /* Stub.  NOTE(review): returns 0, which equals STATUS_SUCCESS, so
     * callers observe success even though nothing is done -- confirm this
     * is intentional. */
    UNIMPLEMENTED;
    return 0;
}
1947
1948 NTSTATUS
1949 NTAPI
1950 RtlQueryHeapInformation(HANDLE HeapHandle,
1951 HEAP_INFORMATION_CLASS HeapInformationClass,
1952 PVOID HeapInformation OPTIONAL,
1953 SIZE_T HeapInformationLength OPTIONAL,
1954 PSIZE_T ReturnLength OPTIONAL)
1955 {
1956 HEAP *heapPtr;
1957
1958 heapPtr = HEAP_GetPtr(HeapHandle);
1959 if (!heapPtr)
1960 {
1961 RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
1962 return FALSE;
1963 }
1964
1965 UNIMPLEMENTED
1966
1967 switch (HeapInformationClass)
1968 {
1969 case HeapCompatibilityInformation:
1970 if (ReturnLength) *ReturnLength = sizeof(ULONG);
1971
1972 if (HeapInformationLength < sizeof(ULONG))
1973 return STATUS_BUFFER_TOO_SMALL;
1974
1975 *(ULONG *)HeapInformation = 0; /* standard heap */
1976 return STATUS_SUCCESS;
1977 default:
1978 return STATUS_INVALID_INFO_CLASS;
1979 }
1980 }
1981
NTSTATUS
NTAPI
RtlMultipleAllocateHeap(IN PVOID HeapHandle,
                        IN ULONG Flags,
                        IN SIZE_T Size,
                        IN ULONG Count,
                        OUT PVOID *Array)
{
    /* Stub: batched allocation is not implemented; no entries are written
     * to Array */
    UNIMPLEMENTED;
    return 0;
}
1993
NTSTATUS
NTAPI
RtlMultipleFreeHeap(IN PVOID HeapHandle,
                    IN ULONG Flags,
                    IN ULONG Count,
                    OUT PVOID *Array)
{
    /* Stub: batched free is not implemented; the blocks in Array are not
     * released */
    UNIMPLEMENTED;
    return 0;
}