typedef struct tagARENA_INUSE
{
- DWORD size; /* Block size; must be the first field */
+ SIZE_T size; /* Block size; must be the first field */
DWORD magic : 23; /* Magic number */
DWORD has_user_data : 1; /* There is user data associated with this block */
DWORD unused_bytes : 8; /* Number of bytes in the block not used by user data (max value is HEAP_MIN_DATA_SIZE+HEAP_MIN_SHRINK_SIZE) */
typedef struct tagARENA_FREE
{
- DWORD size; /* Block size; must be the first field */
+ SIZE_T size; /* Block size; must be the first field */
DWORD magic; /* Magic number */
struct list entry; /* Entry in free list */
} ARENA_FREE;
#define ARENA_FLAG_FREE        0x00000001  /* flags OR'ed with arena size */
#define ARENA_FLAG_PREV_FREE   0x00000002
#define ARENA_INUSE_MAGIC      0x455355    /* Value for arena 'magic' field */
#define ARENA_FREE_MAGIC       0x45455246  /* Value for arena 'magic' field */

/* Mask extracting the block size from an arena 'size' field; the low bits
 * are reserved for the ARENA_FLAG_* flags (2 bits on Win32, 3 on Win64
 * where arenas are 8-byte granular). */
#ifndef _WIN64
#define ARENA_SIZE_MASK        (~3L)
#else
#define ARENA_SIZE_MASK        (~7L)
#endif

#define ARENA_INUSE_FILLER     0x55        /* filler byte for in-use block tails */
#define ARENA_FREE_FILLER      0xaa        /* filler byte for freed blocks */
/* everything is aligned on 8 byte boundaries (16 for Win64) */
#define ALIGNMENT    (2*sizeof(void*))
/* offset of the user data within an ALIGNMENT-sized allocation unit */
#define ARENA_OFFSET (ALIGNMENT - sizeof(ARENA_INUSE))

/* round a request up to ALIGNMENT and add the arena offset */
#define ROUND_SIZE(size) ((((size) + ALIGNMENT - 1) & ~(ALIGNMENT-1)) + ARENA_OFFSET)

#define QUIET  1  /* Suppress messages */
#define NOISY  0  /* Report all errors */

/* minimum data size (without arenas) of an allocated block */
/* must be enough to hold the two list entries a free block needs */
#define HEAP_MIN_DATA_SIZE   ROUND_SIZE(2 * sizeof(struct list))
/* minimum size that must remain to shrink an allocated block */
#define HEAP_MIN_SHRINK_SIZE (HEAP_MIN_DATA_SIZE+sizeof(ARENA_FREE))

#define HEAP_NB_FREE_LISTS   5  /* Number of free lists */
/* Max size of the blocks on the free lists (last list takes everything). */
static const DWORD HEAP_freeListSizes[HEAP_NB_FREE_LISTS] =
{
    0x10, 0x20, 0x80, 0x200, MAXULONG
};
-typedef struct
+typedef union
{
ARENA_FREE arena;
+ void *aligment[4];
} FREE_LIST_ENTRY;
struct tagHEAP;
typedef struct tagSUBHEAP
{
- DWORD size; /* Size of the whole sub-heap */
- DWORD commitSize; /* Committed size of the sub-heap */
+ SIZE_T size; /* Size of the whole sub-heap */
+ SIZE_T commitSize; /* Committed size of the sub-heap */
DWORD headerSize; /* Size of the heap header */
struct tagSUBHEAP *next; /* Next sub-heap */
struct tagHEAP *heap; /* Main heap structure */
{
SUBHEAP subheap; /* First sub-heap */
struct list entry; /* Entry in process heap list */
- RTL_CRITICAL_SECTION critSection; /* Critical section for serialization */
- FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
+ HEAP_LOCK lock; /* Critical section for serialization */
+ DECLSPEC_ALIGN(8) FREE_LIST_ENTRY freeList[HEAP_NB_FREE_LISTS]; /* Free lists */
DWORD flags; /* Heap flags */
DWORD magic; /* Magic number */
PRTL_HEAP_COMMIT_ROUTINE commitRoutine;
return i;
}
+/* get the memory protection type to use for a given heap */
+static inline ULONG get_protection_type( DWORD flags )
+{
+ return (flags & HEAP_CREATE_ENABLE_EXECUTE) ? PAGE_EXECUTE_READWRITE : PAGE_READWRITE;
+}
+
static RTL_CRITICAL_SECTION_DEBUG process_heap_critsect_debug =
{
0, 0, NULL, /* will be set later */
}
}
DPRINTF( "\nTotal: Size=%08lx Committed=%08lx Free=%08lx Used=%08lx Arenas=%08lx (%ld%%)\n\n",
- subheap->size, subheap->commitSize, freeSize, usedSize,
- arenaSize, (arenaSize * 100) / subheap->size );
+ subheap->size, subheap->commitSize, freeSize, usedSize,
+ arenaSize, (arenaSize * 100) / subheap->size );
subheap = subheap->next;
}
}
else
{
Status = ZwAllocateVirtualMemory( NtCurrentProcess(), &ptr, 0,
- &size, MEM_COMMIT, PAGE_READWRITE );
+ &size, MEM_COMMIT, get_protection_type(subheap->heap->flags) );
}
if (!NT_SUCCESS(Status))
{
NTSTATUS Status;
if (!address && ZwAllocateVirtualMemory( NtCurrentProcess(), &address, 0,
- &commitSize, MEM_COMMIT, PAGE_READWRITE ))
+ &commitSize, MEM_COMMIT, get_protection_type(flags) ))
{
WARN("Could not commit %08lx bytes for sub-heap %p\n", commitSize, address );
return FALSE;
{
if (!processHeap) /* do it by hand to avoid memory allocations */
{
- heap->critSection.DebugInfo = &process_heap_critsect_debug;
- heap->critSection.LockCount = -1;
- heap->critSection.RecursionCount = 0;
- heap->critSection.OwningThread = 0;
- heap->critSection.LockSemaphore = 0;
- heap->critSection.SpinCount = 0;
- process_heap_critsect_debug.CriticalSection = &heap->critSection;
+ heap->lock.CriticalSection.DebugInfo = &process_heap_critsect_debug;
+ heap->lock.CriticalSection.LockCount = -1;
+ heap->lock.CriticalSection.RecursionCount = 0;
+ heap->lock.CriticalSection.OwningThread = 0;
+ heap->lock.CriticalSection.LockSemaphore = 0;
+ heap->lock.CriticalSection.SpinCount = 0;
+ process_heap_critsect_debug.CriticalSection = &heap->lock.CriticalSection;
}
- else RtlInitializeHeapLock( &heap->critSection );
+ else RtlInitializeHeapLock( &heap->lock );
}
}
0,
&commitSize,
MEM_COMMIT,
- PAGE_EXECUTE_READWRITE);
+ get_protection_type(flags));
}
if (!NT_SUCCESS(Status))
{
totalSize = (totalSize + 0xffff) & 0xffff0000;
commitSize = (commitSize + 0xffff) & 0xffff0000;
if (!commitSize) commitSize = 0x10000;
+ totalSize = min( totalSize, 0xffff0000 ); /* don't allow a heap larger than 4Gb */
if (totalSize < commitSize) totalSize = commitSize;
if (!address)
{
/* allocate the memory block */
if (ZwAllocateVirtualMemory( NtCurrentProcess(), &address, 0, &totalSize,
- MEM_RESERVE | MEM_COMMIT, PAGE_READWRITE ))
+ MEM_RESERVE | MEM_COMMIT, get_protection_type(flags) ))
{
WARN("Could not allocate %08lx bytes\n", totalSize );
return NULL;
{
SUBHEAP *subheap;
struct list *ptr;
+ SIZE_T total_size;
FREE_LIST_ENTRY *pEntry = heap->freeList + get_freelist_index( size + sizeof(ARENA_INUSE) );
/* Find a suitable free list, and in it find a block large enough */
* So just one heap struct, one first free arena which will eventually
* get used, and a second free arena that might get assigned all remaining
* free space in HEAP_ShrinkBlock() */
- size += ROUND_SIZE(sizeof(SUBHEAP)) + sizeof(ARENA_INUSE) + sizeof(ARENA_FREE);
- if (!(subheap = HEAP_CreateSubHeap( heap, NULL, heap->flags, size,
- max( HEAP_DEF_SIZE, size ), NULL )))
+ total_size = size + ROUND_SIZE(sizeof(SUBHEAP)) + sizeof(ARENA_INUSE) + sizeof(ARENA_FREE);
+ if (total_size < size) return NULL; /* overflow */
+
+ if (!(subheap = HEAP_CreateSubHeap( heap, NULL, heap->flags, total_size,
+ max( HEAP_DEF_SIZE, total_size ), NULL )))
return NULL;
TRACE("created new sub-heap %p of %08lx bytes for heap %p\n",
char *heapEnd = (char *)subheap + subheap->size;
/* Check for unaligned pointers */
- if ( (ULONG_PTR)pArena % ALIGNMENT != 0 )
+ if ( (ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET )
{
ERR("Heap %p: unaligned arena pointer %p\n", subheap->heap, pArena );
return FALSE;
const char *heapEnd = (const char *)subheap + subheap->size;
/* Check for unaligned pointers */
- if ( (ULONG_PTR)pArena % ALIGNMENT != 0 )
+ if ( (ULONG_PTR)pArena % ALIGNMENT != ARENA_OFFSET )
{
if ( quiet == NOISY )
{
flags |= heapPtr->flags;
/* calling HeapLock may result in infinite recursion, so do the critsect directly */
if (!(flags & HEAP_NO_SERIALIZE))
- RtlEnterHeapLock( &heapPtr->critSection );
+ RtlEnterHeapLock( &heapPtr->lock );
if (block)
{
ret = HEAP_ValidateInUseArena( subheap, (const ARENA_INUSE *)block - 1, quiet );
if (!(flags & HEAP_NO_SERIALIZE))
- RtlLeaveHeapLock( &heapPtr->critSection );
+ RtlLeaveHeapLock( &heapPtr->lock );
return ret;
}
subheap = subheap->next;
}
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
return ret;
}
if (processHeap)
{
HEAP *heapPtr = subheap->heap;
- RtlEnterHeapLock( &processHeap->critSection );
+ RtlEnterHeapLock( &processHeap->lock );
list_add_head( &processHeap->entry, &heapPtr->entry );
- RtlLeaveHeapLock( &processHeap->critSection );
+ RtlLeaveHeapLock( &processHeap->lock );
}
else
{
processHeap = subheap->heap; /* assume the first heap we create is the process main heap */
list_init( &processHeap->entry );
+ assert( (ULONG_PTR)processHeap->freeList % ALIGNMENT == ARENA_OFFSET );
}
}
return heap; /* cannot delete the main process heap */
/* remove it from the per-process list */
- RtlEnterHeapLock( &processHeap->critSection );
+ RtlEnterHeapLock( &processHeap->lock );
list_remove( &heapPtr->entry );
- RtlLeaveHeapLock( &processHeap->critSection );
+ RtlLeaveHeapLock( &processHeap->lock );
}
- RtlDeleteHeapLock( &heapPtr->critSection );
+ RtlDeleteHeapLock( &heapPtr->lock );
subheap = &heapPtr->subheap;
while (subheap)
{
PVOID NTAPI
RtlAllocateHeap(HANDLE heap, /* [in] Handle of private heap block */
ULONG flags, /* [in] Heap allocation control flags */
- ULONG size) /* [in] Number of bytes to allocate */
+ SIZE_T size) /* [in] Number of bytes to allocate */
{
ARENA_FREE *pArena;
ARENA_INUSE *pInUse;
//flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY;
flags |= heapPtr->flags;
rounded_size = ROUND_SIZE(size);
+ if (rounded_size < size) /* overflow */
+ {
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
+ return NULL;
+ }
+
if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;
- if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
/* Locate a suitable free block */
-
- /* Locate a suitable free block */
-
if (!(pArena = HEAP_FindFreeBlock( heapPtr, rounded_size, &subheap )))
{
TRACE("(%p,%08lx,%08lx): returning NULL\n",
heap, flags, size );
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
return NULL;
}
pInUse->unused_bytes = (pInUse->size & ARENA_SIZE_MASK) - size;
if (flags & HEAP_ZERO_MEMORY)
- clear_block( pInUse + 1, pInUse->size & ARENA_SIZE_MASK );
+ {
+ clear_block( pInUse + 1, size );
+ mark_block_uninitialized( (char *)(pInUse + 1) + size, pInUse->unused_bytes );
+ }
else
mark_block_uninitialized( pInUse + 1, pInUse->size & ARENA_SIZE_MASK );
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
TRACE("(%p,%08lx,%08lx): returning %p\n", heap, flags, size, pInUse + 1 );
return (LPVOID)(pInUse + 1);
flags &= HEAP_NO_SERIALIZE;
flags |= heapPtr->flags;
- if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
if (!HEAP_IsRealArena( heapPtr, HEAP_NO_SERIALIZE, ptr, QUIET ))
{
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
TRACE("(%p,%08lx,%p): returning FALSE\n", heap, flags, ptr );
return FALSE;
}
- /* Turn the block into a free block */
-
+ /* Some sanity checks */
pInUse = (ARENA_INUSE *)ptr - 1;
subheap = HEAP_FindSubHeap( heapPtr, pInUse );
+ if ((char *)pInUse < (char *)subheap + subheap->headerSize) goto error;
+ if (!HEAP_ValidateInUseArena( subheap, pInUse, QUIET )) goto error;
+
+ /* Turn the block into a free block */
+
HEAP_MakeInUseBlockFree( subheap, pInUse );
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
TRACE("(%p,%08lx,%p): returning TRUE\n", heap, flags, ptr );
return TRUE;
+
+error:
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
+ RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
+ TRACE("(%p,%08x,%p): returning FALSE\n", heap, flags, ptr );
+ return FALSE;
}
ARENA_INUSE *pArena;
HEAP *heapPtr;
SUBHEAP *subheap;
- SIZE_T oldSize, rounded_size;
+ SIZE_T oldBlockSize, oldActualSize, rounded_size;
if (!ptr) return NULL;
if (!(heapPtr = HEAP_GetPtr( heap )))
//Flags &= HEAP_GENERATE_EXCEPTIONS | HEAP_NO_SERIALIZE | HEAP_ZERO_MEMORY |
// HEAP_REALLOC_IN_PLACE_ONLY;
flags |= heapPtr->flags;
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
+
rounded_size = ROUND_SIZE(size);
+ if (rounded_size < size) goto oom; /* overflow */
if (rounded_size < HEAP_MIN_DATA_SIZE) rounded_size = HEAP_MIN_DATA_SIZE;
- if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->critSection );
- if (!HEAP_IsRealArena( heapPtr, HEAP_NO_SERIALIZE, ptr, QUIET ))
- {
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
- RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
- TRACE("(%p,%08lx,%p,%08lx): returning NULL\n", heap, flags, ptr, size );
- return NULL;
- }
-
pArena = (ARENA_INUSE *)ptr - 1;
- subheap = HEAP_FindSubHeap( heapPtr, pArena );
- oldSize = (pArena->size & ARENA_SIZE_MASK);
- if (rounded_size > oldSize)
+ if(!(subheap = HEAP_FindSubHeap( heapPtr, pArena ))) goto done;
+ if ((char *)pArena < (char *)subheap + subheap->headerSize) goto error;
+ if (!HEAP_ValidateInUseArena( subheap, pArena, QUIET )) goto error;
+
+ oldBlockSize = (pArena->size & ARENA_SIZE_MASK);
+ oldActualSize = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
+
+ if (rounded_size > oldBlockSize)
{
- char *pNext = (char *)(pArena + 1) + oldSize;
+ char *pNext = (char *)(pArena + 1) + oldBlockSize;
if ((pNext < (char *)subheap + subheap->size) &&
(*(DWORD *)pNext & ARENA_FLAG_FREE) &&
- (oldSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= rounded_size))
+ (oldBlockSize + (*(DWORD *)pNext & ARENA_SIZE_MASK) + sizeof(ARENA_FREE) >= rounded_size))
{
- /* The next block is free and large enough */
ARENA_FREE *pFree = (ARENA_FREE *)pNext;
list_remove( &pFree->entry );
pArena->size += (pFree->size & ARENA_SIZE_MASK) + sizeof(*pFree);
- if (!HEAP_Commit( subheap, pArena, rounded_size ))
- {
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
- if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
- RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
- return NULL;
- }
+
+ if (!HEAP_Commit( subheap, pArena, rounded_size )) goto oom;
+
HEAP_ShrinkBlock( subheap, pArena, rounded_size );
+ mark_block_initialized( pArena + 1, oldActualSize );
}
else /* Do it the hard way */
{
ARENA_FREE *pNew;
ARENA_INUSE *pInUse;
SUBHEAP *newsubheap;
-
if ((flags & HEAP_REALLOC_IN_PLACE_ONLY) ||
!(pNew = HEAP_FindFreeBlock( heapPtr, rounded_size, &newsubheap )))
- {
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
- if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
- RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
- return NULL;
- }
+ goto oom;
/* Build the in-use arena */
+ sizeof(ARENA_FREE) - sizeof(ARENA_INUSE);
pInUse->magic = ARENA_INUSE_MAGIC;
HEAP_ShrinkBlock( newsubheap, pInUse, rounded_size );
- mark_block_initialized( pInUse + 1, oldSize );
- memcpy( pInUse + 1, pArena + 1, oldSize );
+
+ mark_block_initialized( pInUse + 1, oldActualSize );
+ memcpy( pInUse + 1, pArena + 1, oldActualSize );
/* Free the previous block */
pArena = pInUse;
}
}
- else HEAP_ShrinkBlock( subheap, pArena, rounded_size ); /* Shrink the block */
+ else
+ {
+ HEAP_ShrinkBlock( subheap, pArena, rounded_size ); /* Shrink the block */
+ mark_block_initialized( pArena + 1, size );
+ }
pArena->unused_bytes = (pArena->size & ARENA_SIZE_MASK) - size;
/* Clear the extra bytes if needed */
- if (rounded_size > oldSize)
+ if (size > oldActualSize)
{
if (flags & HEAP_ZERO_MEMORY)
- clear_block( (char *)(pArena + 1) + oldSize,
- (pArena->size & ARENA_SIZE_MASK) - oldSize );
+ {
+ clear_block( (char *)(pArena + 1) + oldActualSize, size - oldActualSize );
+ mark_block_uninitialized( (char *)(pArena + 1) + size,
+ (pArena->size & ARENA_SIZE_MASK) - oldActualSize );
+ }
else
- mark_block_uninitialized( (char *)(pArena + 1) + oldSize,
- (pArena->size & ARENA_SIZE_MASK) - oldSize );
+ mark_block_uninitialized( (char *)(pArena + 1) + oldActualSize,
+ (pArena->size & ARENA_SIZE_MASK) - oldActualSize );
}
/* Return the new arena */
-
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
-
+done:
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
TRACE("(%p,%08lx,%p,%08lx): returning %p\n", heap, flags, ptr, size, pArena + 1 );
return (LPVOID)(pArena + 1);
+
+oom:
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
+ if (flags & HEAP_GENERATE_EXCEPTIONS) RtlRaiseStatus( STATUS_NO_MEMORY );
+ RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_NO_MEMORY );
+ TRACE("(%p,%08x,%p,%08lx): returning oom\n", heap, flags, ptr, size );
+ return NULL;
+
+error:
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
+ RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
+ TRACE("(%p,%08x,%p,%08lx): returning error\n", heap, flags, ptr, size );
+ return NULL;
}
HEAP *heapPtr = HEAP_GetPtr( Heap );
if (!heapPtr)
return FALSE;
- RtlEnterHeapLock( &heapPtr->critSection );
+ RtlEnterHeapLock( &heapPtr->lock );
return TRUE;
}
HEAP *heapPtr = HEAP_GetPtr( Heap );
if (!heapPtr)
return FALSE;
- RtlLeaveHeapLock( &heapPtr->critSection );
+ RtlLeaveHeapLock( &heapPtr->lock );
return TRUE;
}
*
* @implemented
*/
-ULONG NTAPI
+SIZE_T NTAPI
RtlSizeHeap(
HANDLE heap,
ULONG flags,
PVOID ptr
)
{
- SIZE_T ret;
+ SIZE_T ret;
HEAP *heapPtr = HEAP_GetPtr( heap );
if (!heapPtr)
{
RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
- return ~0UL;
+ return MAXULONG;
}
flags &= HEAP_NO_SERIALIZE;
flags |= heapPtr->flags;
- if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlEnterHeapLock( &heapPtr->lock );
if (!HEAP_IsRealArena( heapPtr, HEAP_NO_SERIALIZE, ptr, QUIET ))
{
RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_PARAMETER );
- ret = ~0UL;
+ ret = MAXULONG;
}
else
{
- ARENA_INUSE *pArena = (ARENA_INUSE *)ptr - 1;
+ const ARENA_INUSE *pArena = (const ARENA_INUSE *)ptr - 1;
ret = (pArena->size & ARENA_SIZE_MASK) - pArena->unused_bytes;
}
- if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->critSection );
+ if (!(flags & HEAP_NO_SERIALIZE)) RtlLeaveHeapLock( &heapPtr->lock );
TRACE("(%p,%08lx,%p): returning %08lx\n", heap, flags, ptr, ret );
return ret;
NTSTATUS Status = STATUS_SUCCESS;
struct list *ptr=NULL;
- RtlEnterHeapLock(&processHeap->critSection);
+ RtlEnterHeapLock(&processHeap->lock);
Status=HeapEnumerationRoutine(processHeap,lParam);
LIST_FOR_EACH( ptr, &processHeap->entry )
Status = HeapEnumerationRoutine(ptr,lParam);
}
- RtlLeaveHeapLock(&processHeap->critSection);
+ RtlLeaveHeapLock(&processHeap->lock);
return Status;
}
ULONG total = 1; /* main heap */
struct list *ptr;
ULONG i=0;
- RtlEnterHeapLock( &processHeap->critSection );
+ RtlEnterHeapLock( &processHeap->lock );
LIST_FOR_EACH( ptr, &processHeap->entry ) total++;
//if (total <= count)
{
*(heaps++) = LIST_ENTRY( ptr, HEAP, entry );
}
}
- RtlLeaveHeapLock( &processHeap->critSection );
+ RtlLeaveHeapLock( &processHeap->lock );
return i;
}
BOOLEAN Result = TRUE;
HEAP ** pptr;
- RtlEnterHeapLock( &processHeap->critSection );
+ RtlEnterHeapLock( &processHeap->lock );
for (pptr = (HEAP**)&NtCurrentPeb()->ProcessHeaps; *pptr; pptr++)
{
}
}
- RtlLeaveHeapLock( &processHeap->critSection );
+ RtlLeaveHeapLock( &processHeap->lock );
return Result;
}
RtlSetUserFlagsHeap(IN PVOID HeapHandle,
IN ULONG Flags,
IN PVOID BaseAddress,
- IN ULONG UserFlags)
+ IN ULONG UserFlagsReset,
+ IN ULONG UserFlagsSet)
{
HEAP *heapPtr;
PHEAP_USER_DATA udata;
udata = HEAP_AllocUserData(heapPtr, BaseAddress);
if (!udata) return FALSE;
}
- udata->UserFlags = UserFlags & HEAP_SETTABLE_USER_FLAGS;
+ udata->UserFlags = UserFlagsSet & HEAP_SETTABLE_USER_FLAGS;
return TRUE;
}
RtlExtendHeap(IN HANDLE Heap,
IN ULONG Flags,
IN PVOID P,
- IN ULONG Size)
+ IN SIZE_T Size)
{
/* TODO */
UNIMPLEMENTED;
return NULL;
}
-DWORD
+NTSTATUS
NTAPI
RtlSetHeapInformation(IN HANDLE HeapHandle OPTIONAL,
IN HEAP_INFORMATION_CLASS HeapInformationClass,
return 0;
}
-DWORD
+NTSTATUS
NTAPI
RtlQueryHeapInformation(HANDLE HeapHandle,
HEAP_INFORMATION_CLASS HeapInformationClass,
SIZE_T HeapInformationLength OPTIONAL,
PSIZE_T ReturnLength OPTIONAL)
{
- UNIMPLEMENTED;
- return 0;
+ HEAP *heapPtr;
+
+ heapPtr = HEAP_GetPtr(HeapHandle);
+ if (!heapPtr)
+ {
+ RtlSetLastWin32ErrorAndNtStatusFromNtStatus( STATUS_INVALID_HANDLE );
+ return FALSE;
+ }
+
+ UNIMPLEMENTED
+
+ switch (HeapInformationClass)
+ {
+ case HeapCompatibilityInformation:
+ if (ReturnLength) *ReturnLength = sizeof(ULONG);
+
+ if (HeapInformationLength < sizeof(ULONG))
+ return STATUS_BUFFER_TOO_SMALL;
+
+ *(ULONG *)HeapInformation = 0; /* standard heap */
+ return STATUS_SUCCESS;
+ default:
+ return STATUS_INVALID_INFO_CLASS;
+ }
}
-DWORD
+NTSTATUS
NTAPI
RtlMultipleAllocateHeap(IN PVOID HeapHandle,
- IN DWORD Flags,
+ IN ULONG Flags,
IN SIZE_T Size,
- IN DWORD Count,
+ IN ULONG Count,
OUT PVOID *Array)
{
UNIMPLEMENTED;
return 0;
}
-DWORD
+NTSTATUS
NTAPI
RtlMultipleFreeHeap(IN PVOID HeapHandle,
- IN DWORD Flags,
- IN DWORD Count,
+ IN ULONG Flags,
+ IN ULONG Count,
OUT PVOID *Array)
{
UNIMPLEMENTED;