# Fills a disk with files of random (incompressible) data. Use this if you're
# having trouble getting accurate disk read/write statistics due to de-duplication.
param (
    # Total amount of random data to generate, in gigabytes.
    # Must be positive when supplied; an explicitly passed 0 or negative value
    # would otherwise silently do nothing. (The default of an omitted parameter
    # is not validated, so omitting -TotalGB still results in a no-op run, as before.)
    [ValidateRange(1, [int]::MaxValue)]
    [int]$TotalGB,

    # Directory where the Random-N.rnd files are written.
    [string]$location = "D:\"
)
# Size constants. PowerShell's native numeric multiplier suffixes (1KB, 1MB, 1GB)
# replace the hand-rolled multiplication chain; the variables are kept because
# later code refers to them.
$KB = 1KB
$MB = 1MB
$GB = 1GB
$MIN_FSIZE = 1MB                # smallest file the script will ever create
$maxFileSize = 5GB              # upper bound for one file; halved as free space runs low
$freeSpace = $TotalGB * 1GB     # total number of bytes left to consume
[int]$i = 0                     # sequence number embedded in generated file names
# One RNG and one 1 MB buffer are created up front and reused for every chunk.
# The original constructed `New-Object Random` per chunk: System.Random's default
# seed comes from the clock, so a tight loop gets the same seed repeatedly and
# emits identical buffers -- compressible, dedup-friendly data that defeats the
# whole point of this script.
$rng = New-Object Random
$buffer = New-Object Byte[] $MB

# Keep creating randomly sized files until the requested space is consumed, or
# until the allowed file size has shrunk below the 1 MB floor.
while ($maxFileSize -gt $MIN_FSIZE -and $freeSpace -gt 0)
{
    [Uint64]$candidateFileSize = Get-Random -minimum $MIN_FSIZE -maximum $maxFileSize
    if ($candidateFileSize -ge $freeSpace)
    {
        # Candidate doesn't fit the remaining budget: shrink the ceiling and retry.
        $maxFileSize = $maxFileSize / 2
    }
    else
    {
        # Join-Path works whether or not $location ends with a separator
        # (plain concatenation required a trailing backslash).
        $fileName = Join-Path $location ("Random-" + $i + ".rnd")
        $i += 1
        $fileSize = $candidateFileSize
        Write-Host "---------------------------"
        Write-Host "Creating"$fileName" of "$fileSize" bytes"
        $fstream = New-Object System.IO.FileStream($fileName, [io.filemode]::OpenOrCreate)
        $w = New-Object System.IO.BinaryWriter($fstream)
        try
        {
            $consumed = 0
            while ($consumed -lt $fileSize)
            {
                $rng.NextBytes($buffer)
                # Trim the last chunk so the file is exactly $fileSize bytes
                # (the original always wrote a full 1 MB, overshooting the target).
                $chunk = [int][Math]::Min([Uint64]$MB, $fileSize - $consumed)
                $w.Write($buffer, 0, $chunk)
                $consumed += $chunk
            }
        }
        finally
        {
            # Always release the handles, even if a write fails (e.g. disk full).
            $w.Close()
            $fstream.Close()
        }
        Write-Host "Write Complete. Closing streams"
        $freeSpace -= $fileSize
        Write-Host "Remaining to Consume: "$freeSpace
    }
}