/* Global list head; name suggests it links shared cache maps with no
 * dirty pages ("clean" maps). NOTE(review): no usage is visible in this
 * chunk — confirm against the lazy writer / CcRosDeleteFileCache. */
LIST_ENTRY CcCleanSharedCacheMapList;
#if DBG
-VOID CcRosVacbIncRefCount_(PROS_VACB vacb, PCSTR file, INT line)
+ULONG CcRosVacbIncRefCount_(PROS_VACB vacb, PCSTR file, INT line)
{
- ++vacb->ReferenceCount;
+ ULONG Refs;
+
+ Refs = InterlockedIncrement((PLONG)&vacb->ReferenceCount);
if (vacb->SharedCacheMap->Trace)
{
DbgPrint("(%s:%i) VACB %p ++RefCount=%lu, Dirty %u, PageOut %lu\n",
- file, line, vacb, vacb->ReferenceCount, vacb->Dirty, vacb->PageOut);
+ file, line, vacb, Refs, vacb->Dirty, vacb->PageOut);
}
+
+ return Refs;
}
-VOID CcRosVacbDecRefCount_(PROS_VACB vacb, PCSTR file, INT line)
+ULONG CcRosVacbDecRefCount_(PROS_VACB vacb, PCSTR file, INT line)
{
- ASSERT(vacb->ReferenceCount != 0);
- --vacb->ReferenceCount;
- ASSERT(!(vacb->ReferenceCount == 0 && vacb->Dirty));
+ ULONG Refs;
+
+ Refs = InterlockedDecrement((PLONG)&vacb->ReferenceCount);
+ ASSERT(!(Refs == 0 && vacb->Dirty));
if (vacb->SharedCacheMap->Trace)
{
DbgPrint("(%s:%i) VACB %p --RefCount=%lu, Dirty %u, PageOut %lu\n",
- file, line, vacb, vacb->ReferenceCount, vacb->Dirty, vacb->PageOut);
+ file, line, vacb, Refs, vacb->Dirty, vacb->PageOut);
+ }
+
+ return Refs;
+}
+ULONG CcRosVacbGetRefCount_(PROS_VACB vacb, PCSTR file, INT line)
+{
+ ULONG Refs;
+
+ Refs = InterlockedCompareExchange((PLONG)&vacb->ReferenceCount, 0, 0);
+ if (vacb->SharedCacheMap->Trace)
+ {
+ DbgPrint("(%s:%i) VACB %p ==RefCount=%lu, Dirty %u, PageOut %lu\n",
+ file, line, vacb, Refs, vacb->Dirty, vacb->PageOut);
}
+
+ return Refs;
}
#endif
ASSERT(current->Dirty);
/* One reference is added above */
- if (current->ReferenceCount > 2)
+ if (CcRosVacbGetRefCount(current) > 2)
{
CcRosReleaseVacbLock(current);
current->SharedCacheMap->Callbacks->ReleaseFromLazyWrite(
current_entry = VacbLruListHead.Flink;
while (current_entry != &VacbLruListHead)
{
+ ULONG Refs;
+
current = CONTAINING_RECORD(current_entry,
ROS_VACB,
VacbLruListEntry);
}
/* Dereference the VACB */
- CcRosVacbDecRefCount(current);
+ Refs = CcRosVacbDecRefCount(current);
/* Check if we can free this entry now */
- if (current->ReferenceCount < 2)
+ if (Refs < 2)
{
ASSERT(!current->Dirty);
ASSERT(!current->MappedCount);
- ASSERT(current->ReferenceCount == 1);
+ ASSERT(Refs == 1);
RemoveEntryList(¤t->CacheMapVacbListEntry);
RemoveEntryList(¤t->VacbLruListEntry);
+ InitializeListHead(¤t->VacbLruListEntry);
InsertHeadList(&FreeList, ¤t->CacheMapVacbListEntry);
/* Calculate how many pages we freed for Mm */
current = CONTAINING_RECORD(current_entry,
ROS_VACB,
CacheMapVacbListEntry);
+ InitializeListHead(¤t->CacheMapVacbListEntry);
CcRosVacbDecRefCount(current);
CcRosInternalFreeVacb(current);
}
BOOLEAN Dirty,
BOOLEAN Mapped)
{
+ ULONG Refs;
ASSERT(SharedCacheMap);
DPRINT("CcRosReleaseVacb(SharedCacheMap 0x%p, Vacb 0x%p, Valid %u)\n",
{
Vacb->MappedCount++;
}
- CcRosVacbDecRefCount(Vacb);
+ Refs = CcRosVacbDecRefCount(Vacb);
if (Mapped && (Vacb->MappedCount == 1))
{
CcRosVacbIncRefCount(Vacb);
}
- ASSERT(Vacb->ReferenceCount != 0);
+ ASSERT(Refs > 0);
CcRosReleaseVacbLock(Vacb);
Vacb->Dirty = FALSE;
RemoveEntryList(&Vacb->DirtyVacbListEntry);
+ InitializeListHead(&Vacb->DirtyVacbListEntry);
CcTotalDirtyPages -= VACB_MAPPING_GRANULARITY / PAGE_SIZE;
Vacb->SharedCacheMap->DirtyPages -= VACB_MAPPING_GRANULARITY / PAGE_SIZE;
CcRosVacbDecRefCount(Vacb);
ULONG i;
NTSTATUS Status;
ULONG_PTR NumberOfPages;
+ PVOID BaseAddress = NULL;
/* Create a memory area. */
MmLockAddressSpace(MmGetKernelAddressSpace());
Status = MmCreateMemoryArea(MmGetKernelAddressSpace(),
0, // nothing checks for VACB mareas, so set to 0
- &Vacb->BaseAddress,
+ &BaseAddress,
VACB_MAPPING_GRANULARITY,
PAGE_READWRITE,
(PMEMORY_AREA*)&Vacb->MemoryArea,
0,
PAGE_SIZE);
+ ASSERT(Vacb->BaseAddress == NULL);
+ Vacb->BaseAddress = BaseAddress;
MmUnlockAddressSpace(MmGetKernelAddressSpace());
if (!NT_SUCCESS(Status))
{
ASSERT(((ULONG_PTR)Vacb->BaseAddress % PAGE_SIZE) == 0);
ASSERT((ULONG_PTR)Vacb->BaseAddress > (ULONG_PTR)MmSystemRangeStart);
+ ASSERT((ULONG_PTR)Vacb->BaseAddress + VACB_MAPPING_GRANULARITY - 1 > (ULONG_PTR)MmSystemRangeStart);
/* Create a virtual mapping for this memory area */
NumberOfPages = BYTES_TO_PAGES(VACB_MAPPING_GRANULARITY);
KeBugCheck(MEMORY_MANAGEMENT);
}
+ ASSERT(BaseAddress == Vacb->BaseAddress);
+ ASSERT(i * PAGE_SIZE < VACB_MAPPING_GRANULARITY);
+ ASSERT((ULONG_PTR)Vacb->BaseAddress + (i * PAGE_SIZE) >= (ULONG_PTR)BaseAddress);
+ ASSERT((ULONG_PTR)Vacb->BaseAddress + (i * PAGE_SIZE) > (ULONG_PTR)MmSystemRangeStart);
+
Status = MmCreateVirtualMapping(NULL,
(PVOID)((ULONG_PTR)Vacb->BaseAddress + (i * PAGE_SIZE)),
PAGE_READWRITE,
}
#endif
current->MappedCount = 0;
- current->DirtyVacbListEntry.Flink = NULL;
- current->DirtyVacbListEntry.Blink = NULL;
current->ReferenceCount = 0;
current->PinCount = 0;
KeInitializeMutex(¤t->Mutex, 0);
+ InitializeListHead(¤t->CacheMapVacbListEntry);
+ InitializeListHead(¤t->DirtyVacbListEntry);
+ InitializeListHead(¤t->VacbLruListEntry);
+
+ CcRosVacbIncRefCount(current);
+
+ Status = CcRosMapVacbInKernelSpace(current);
+ if (!NT_SUCCESS(Status))
+ {
+ CcRosVacbDecRefCount(current);
+ ExFreeToNPagedLookasideList(&VacbLookasideList, current);
+ return Status;
+ }
+
CcRosAcquireVacbLock(current, NULL);
KeAcquireGuardedMutex(&ViewLock);
current);
}
#endif
+ CcRosVacbDecRefCount(*Vacb);
CcRosReleaseVacbLock(*Vacb);
KeReleaseGuardedMutex(&ViewLock);
- ExFreeToNPagedLookasideList(&VacbLookasideList, *Vacb);
+ CcRosInternalFreeVacb(*Vacb);
*Vacb = current;
CcRosAcquireVacbLock(current, NULL);
return STATUS_SUCCESS;
}
KeReleaseSpinLock(&SharedCacheMap->CacheMapLock, oldIrql);
InsertTailList(&VacbLruListHead, ¤t->VacbLruListEntry);
- CcRosVacbIncRefCount(current);
KeReleaseGuardedMutex(&ViewLock);
MI_SET_USAGE(MI_USAGE_CACHE);
}
#endif
- Status = CcRosMapVacbInKernelSpace(current);
- if (!NT_SUCCESS(Status))
- {
- RemoveEntryList(¤t->CacheMapVacbListEntry);
- RemoveEntryList(¤t->VacbLruListEntry);
- CcRosReleaseVacbLock(current);
- ExFreeToNPagedLookasideList(&VacbLookasideList, current);
- }
+ /* Reference it to allow release */
+ CcRosVacbIncRefCount(current);
return Status;
}
{
PROS_VACB current;
NTSTATUS Status;
+ ULONG Refs;
ASSERT(SharedCacheMap);
{
return Status;
}
-
- CcRosVacbIncRefCount(current);
}
+ Refs = CcRosVacbGetRefCount(current);
+
KeAcquireGuardedMutex(&ViewLock);
/* Move to the tail of the LRU list */
DPRINT("*BaseAddress %p\n", *BaseAddress);
*Vacb = current;
*BaseOffset = current->FileOffset.QuadPart;
+
+ ASSERT(Refs > 1);
+
return STATUS_SUCCESS;
}
ASSERT(Vacb->PinCount == 0);
ASSERT(Vacb->ReferenceCount == 0);
+ ASSERT(IsListEmpty(&Vacb->CacheMapVacbListEntry));
+ ASSERT(IsListEmpty(&Vacb->DirtyVacbListEntry));
+ ASSERT(IsListEmpty(&Vacb->VacbLruListEntry));
+ RtlFillMemory(Vacb, sizeof(Vacb), 0xfd);
ExFreeToNPagedLookasideList(&VacbLookasideList, Vacb);
return STATUS_SUCCESS;
}
current = CONTAINING_RECORD(current_entry, ROS_VACB, CacheMapVacbListEntry);
CcRosAcquireVacbLock(current, NULL);
RemoveEntryList(¤t->VacbLruListEntry);
+ InitializeListHead(¤t->VacbLruListEntry);
if (current->Dirty)
{
KeAcquireSpinLock(&SharedCacheMap->CacheMapLock, &oldIrql);
{
current_entry = RemoveTailList(&FreeList);
current = CONTAINING_RECORD(current_entry, ROS_VACB, CacheMapVacbListEntry);
+ InitializeListHead(¤t->CacheMapVacbListEntry);
CcRosVacbDecRefCount(current);
CcRosInternalFreeVacb(current);
}