# Public/Start-Download.ps1
<#
.SYNOPSIS
    PowerShell module to download files with support for multiple threads for improved speed.

.DESCRIPTION
    Downloads a file from a specified URL. It supports multi-threaded downloads, progress
    reporting, hash verification, and automatic retries.

    To see all available parameters: Get-Help Start-Download -Full
    To see just the examples: Get-Help Start-Download -Examples

.PARAMETER Url
    The URL of the file to download. Can be directly specified or piped from another command.
    Either a single URL or an array of URLs can be provided.

.PARAMETER Destination
    The local path where the file should be saved. Can be either a file path or directory.
    If a directory is specified, the filename will be extracted from the URL or server response.

.PARAMETER TempPath
    Directory to store temporary segment files. Defaults to system temp directory.

.PARAMETER NoProgress
    Suppresses the download progress bar.

.PARAMETER Quiet
    Suppresses all output except errors.

.PARAMETER Force
    Overwrites the destination file if it already exists.

.PARAMETER Threads
    Number of concurrent download threads. Higher numbers may improve speed but use more
    memory. Values of 0 or 1 perform a simple single-stream download (the default).
    Recommended range: 1-16.

.PARAMETER MaxRetry
    Maximum number of retry attempts if download fails. Defaults to 3.

.PARAMETER Timeout
    Timeout for the HTTP request in seconds. Defaults to 30.

.PARAMETER ExpectedHash
    Expected file hash. If specified, verifies the downloaded file's hash matches.
    Will retry the download if verification fails.
    Warning: if processing multiple URLs through the pipeline operator, hash verification
    will be disabled after the first URL.

.PARAMETER HashType
    Type of hash to verify. Valid values: MD5, SHA1, SHA256, SHA384, SHA512, CRC32.
    Defaults to MD5 if unspecified.

.PARAMETER UserAgent
    User agent string for the HTTP request. Change if experiencing server restrictions.

    Available presets:
    - 'Chrome' (default): Latest Chrome browser
    - 'Firefox': Latest Firefox browser
    - 'Edge': Latest Edge browser
    - 'Safari': Latest Safari browser
    - 'Opera': Latest Opera browser
    - 'Simple': Simple Mozilla string
    - 'Wget': Wget-like user agent
    - 'Curl': Curl-like user agent
    - 'PS': PowerShell user agent
    - 'None': Empty string (no user agent)
    - Or provide your own custom user agent string

.EXAMPLE
    Start-Download -Url "https://example.com/file.zip"

.EXAMPLE
    Start-Download -Url "https://example.com/file.zip" -Destination "C:\Downloads" -Threads 8

.EXAMPLE
    Start-Download -Url "https://example.com/file.zip" -ExpectedHash "1234ABCD..." -HashType SHA256

.EXAMPLE
    Start-Download -Url "https://example.com/file.zip" -Destination "D:\Data" -Quiet -Force

.EXAMPLE
    Start-Download -Url "https://example.com/file.zip" -TempPath "E:\Temp" -MaxRetry 5
#>
function Start-Download {
    [CmdletBinding()]
    param (
        [Parameter(Mandatory = $true, Position = 0, ValueFromPipeline = $true)][string]$Url,
        [Parameter()][string]$Destination = $PWD.Path,
        [Parameter()][string]$TempPath = $env:TEMP,
        [Parameter()][switch]$NoProgress,
        [Parameter()][switch]$Quiet,
        [Parameter()][switch]$Force,
        [Parameter()][int]$Threads = 0,
        [Parameter()][int]$MaxRetry = 3,
        [Parameter()][int]$Timeout = 30,
        [Parameter()][string]$ExpectedHash,
        [Parameter()][ValidateSet('MD5', 'SHA1', 'SHA256', 'SHA384', 'SHA512', 'CRC32')][string]$HashType = 'MD5',
        [Parameter()][string]$UserAgent = 'Chrome'
    )

    begin {
        # Render a byte count as a human-readable size string.
        function Format-FileSize {
            param([long]$Size)
            switch ($Size) {
                { $_ -gt 1TB } { "{0:n2} TB" -f ($_ / 1TB); Break }
                { $_ -gt 1GB } { "{0:n2} GB" -f ($_ / 1GB); Break }
                { $_ -gt 1MB } { "{0:n2} MB" -f ($_ / 1MB); Break }
                { $_ -gt 1KB } { "{0:n2} KB" -f ($_ / 1KB); Break }
                default { "{0} B " -f $_ }
            }
        }

        # Preset user-agent strings selectable by name via -UserAgent.
        $userAgents = @{
            'Chrome'  = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36"
            'Firefox' = "Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:123.0) Gecko/20100101 Firefox/123.0"
            'Edge'    = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/122.0.0.0 Safari/537.36 Edg/122.0.2365.92"
            'Safari'  = "Mozilla/5.0 (Macintosh; Intel Mac OS X 10_15_7) AppleWebKit/605.1.15 (KHTML, like Gecko) Version/17.3.1 Safari/605.1.15"
            'Opera'   = "Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 (KHTML, like Gecko) Chrome/128.0.0.0 Safari/537.36 OPR/114.0.0.0"
            'Simple'  = "Mozilla/5.0"
            'Wget'    = "Wget/1.21.4"
            'Curl'    = "curl/8.4.0"
            'PS'      = "PowerShell/7.4 (Windows NT 10.0; Win64; x64)"
            'None'    = $null
        }

        # CRC32 is computed through ntdll's RtlComputeCrc32 via a P/Invoke wrapper.
        # NOTE: [type]::GetType() cannot see types compiled by Add-Type, so probe
        # with the -as [type] cast instead to avoid recompiling every call.
        if (($HashType -eq "CRC32") -and -not ('Win32Api' -as [type])) {
            try {
                $typeDefinition = "using System;`n" +
                    "using System.Runtime.InteropServices;`n" +
                    "public class Win32Api {`n" +
                    "    [DllImport(`"ntdll.dll`")]`n" +
                    "    public static extern uint RtlComputeCrc32(uint dwInitial, byte[] pData, int iLen);`n" +
                    "}"
                Add-Type -TypeDefinition $typeDefinition.Trim() | Out-Null
            }
            catch {
                Write-Warning "Failed to load Win32Api. CRC32 hash verification will be disabled."
                $HashType = 'MD5'
                $ExpectedHash = $null
            }
        }

        # Hash comparison is done upper-case against Get-FileHash/ToString("X8") output.
        if ($ExpectedHash) {
            $ExpectedHash = $ExpectedHash.ToUpper()
        }

        # If UserAgent is a preset name, use the corresponding string
        if ($userAgents.ContainsKey($UserAgent)) {
            $UserAgent = $userAgents[$UserAgent]
        }

        # $Verbose is NOT an automatic variable; the switch must be read from the
        # bound parameters or this conflict check can never fire.
        if ($Quiet -and $PSCmdlet.MyInvocation.BoundParameters['Verbose'].IsPresent) {
            Write-Error "Cannot use Quiet and Verbose at the same time."
            return
        }

        # Pipeline input is not yet bound in begin{}, so an unbound -Url here means
        # the URLs will arrive through the pipeline one per process{} invocation.
        $isPipeline = -not $PSBoundParameters.ContainsKey('Url')
        if ($isPipeline) {
            $script:pipelineUrls = @()
        }
    }

    process {
        if ($isPipeline) {
            $script:pipelineUrls += $Url
        }
        # A single expected hash cannot apply to several different files.
        if ($isPipeline -and $script:pipelineUrls.Count -gt 1 -and $ExpectedHash) {
            Write-Warning "Hash verification is disabled when processing multiple URLs."
            $ExpectedHash = $null
        }

        Write-Verbose "Processing URL: $Url"
        $attempt = 0
        $success = $false

        while ($attempt -lt $MaxRetry -and -not $success) {
            try {
                $attempt++
                if ($attempt -gt 1) {
                    Write-Verbose "Retry attempt $attempt of $MaxRetry"
                    Start-Sleep -Seconds ($attempt * 2)   # simple linear back-off
                }

                $BUFFER_SIZE = 64KB          # per-segment read buffer for job downloads
                $MB = 1024 * 1024

                # Probe the server first: file size, range support, suggested filename.
                $request = [System.Net.HttpWebRequest]::Create($Url)
                $request.Method = "HEAD"
                $request.UserAgent = $UserAgent
                $request.Timeout = $Timeout * 1000
                $response = $request.GetResponse()

                if (Test-Path $Destination -PathType Container) {
                    # Destination is a directory: derive the filename from the
                    # Content-Disposition header, then the URL, then a generic fallback.
                    $fileName = ""
                    $contentDisposition = $response.Headers["Content-Disposition"]
                    if ($contentDisposition -match 'filename=(.+?)$') {
                        $fileName = $matches[1].Trim('"', "'")
                    }
                    if (-not $fileName) {
                        $fileName = [System.IO.Path]::GetFileName([System.Uri]::UnescapeDataString($Url))
                    }
                    if (-not $fileName) {
                        # Map well-known content types to an extension. (The previous
                        # GetMimeMapping call maps filename->MIME, not MIME->extension,
                        # and produced names like "downloadapplication/octet-stream".)
                        $extension = switch -Wildcard ($response.ContentType) {
                            'application/zip*'  { '.zip' }
                            'application/pdf*'  { '.pdf' }
                            'application/json*' { '.json' }
                            'text/html*'        { '.html' }
                            'text/plain*'       { '.txt' }
                            default             { '' }
                        }
                        $fileName = "download$extension"
                    }
                    $OutFile = Join-Path $Destination $fileName
                }
                else {
                    $OutFile = $Destination
                }
                # Display name used by progress reporting in BOTH download paths
                # (previously unset when -Destination was a file path).
                $fileName = [System.IO.Path]::GetFileName($OutFile)

                # Remove a temp directory left over from a previous attempt.
                if ($tempDir -and (Test-Path $tempDir)) {
                    try {
                        Write-Verbose "Cleaning up existing temp directory: $tempDir"
                        Remove-Item -Path $tempDir -Recurse -Force -ErrorAction Stop
                    }
                    catch {
                        Write-Warning "Failed to remove existing temp directory: $_"
                    }
                }

                $contentLength = $response.ContentLength
                $acceptRanges = $response.Headers["Accept-Ranges"]
                $response.Close()
                $totalSize = if ($contentLength -gt 0) { Format-FileSize -Size $contentLength }

                if ($Threads -gt 1) {
                    # Fall back to a simple download when segmentation is not possible.
                    if ($contentLength -le 0) {
                        Write-Verbose "Content length is invalid. Falling back to simple download."
                        $Threads = 0
                    }
                    elseif ($acceptRanges -ne "bytes") {
                        Write-Verbose "Server does not support range requests. Falling back to simple download."
                        $Threads = 0
                    }
                    else {
                        $segmentSize = [math]::Ceiling($contentLength / $Threads)
                        if ($segmentSize -lt 1MB) {
                            Write-Verbose "File too small for multi-threaded download. Falling back to simple download."
                            $Threads = 0
                        }
                    }
                }

                $downloadTimer = [System.Diagnostics.Stopwatch]::StartNew()

                if (Test-Path $OutFile) {
                    if ($Force) {
                        Write-Verbose "Overwriting file at $OutFile"
                        Remove-Item -Path $OutFile -Force -ErrorAction Ignore
                    }
                    else {
                        Write-Warning "File already exists at $OutFile. Skipping download."
                        return
                    }
                }

                # -le 1 (not -eq 0): Threads=1 must also use the single-stream path,
                # otherwise it entered the segmented branch with $segmentSize unset.
                if ($Threads -le 1) {
                    # ---------- Simple single-stream download ----------
                    $request = [System.Net.HttpWebRequest]::Create($Url)
                    $request.UserAgent = $UserAgent
                    $request.Method = "GET"
                    $request.Timeout = $Timeout * 1000
                    $request.ReadWriteTimeout = $Timeout * 1000
                    $response = $request.GetResponse()
                    $responseStream = $response.GetResponseStream()
                    $fileStream = [System.IO.File]::Create($OutFile)
                    $bufferSize = 8192
                    $buffer = New-Object byte[] $bufferSize
                    $totalBytesRead = 0
                    $lastUpdate = [DateTime]::Now
                    $lastBytes = 0
                    try {
                        while (($bytesRead = $responseStream.Read($buffer, 0, $buffer.Length)) -gt 0) {
                            $fileStream.Write($buffer, 0, $bytesRead)
                            $totalBytesRead += $bytesRead
                            if (-not $NoProgress) {
                                $now = [DateTime]::Now
                                # Throttle progress updates to roughly 10 per second.
                                if (($now - $lastUpdate).TotalMilliseconds -ge 100) {
                                    $progress = if ($contentLength -gt 0) { ($totalBytesRead / $contentLength) * 100 } else { -1 }
                                    $speed = ($totalBytesRead - $lastBytes) / ($now - $lastUpdate).TotalSeconds / 1MB
                                    $downloadedSize = Format-FileSize -Size $totalBytesRead
                                    $activity = "Downloading File: $fileName ($downloadedSize"
                                    $activity += if ($contentLength -gt 0) { " of $totalSize" } else { " downloaded" }
                                    $activity += ")"
                                    $status = if ($contentLength -gt 0) {
                                        "$([math]::Round($progress, 2))% Complete - $([math]::Round($speed, 2)) MB/s"
                                    }
                                    else {
                                        "$([math]::Round($speed, 2)) MB/s"
                                    }
                                    Write-Progress -Activity $activity `
                                        -Status $status `
                                        -PercentComplete $progress
                                    $lastUpdate = $now
                                    $lastBytes = $totalBytesRead
                                }
                            }
                        }
                    }
                    catch [System.Net.WebException] {
                        # $_ is an ErrorRecord; the WebException status lives on .Exception.
                        if ($_.Exception.Status -eq [System.Net.WebExceptionStatus]::Timeout) {
                            throw "Failed to download file. Timeout of $Timeout seconds exceeded."
                        }
                        # Any other network failure must fail the attempt rather than
                        # be silently swallowed (which left a truncated file behind).
                        throw
                    }
                    finally {
                        $fileStream.Close()
                        $responseStream.Close()
                        $response.Close()
                    }
                }
                else {
                    # ---------- Multi-threaded segmented download ----------
                    $tempDir = Join-Path $TempPath "DownloadSegments-$(New-Guid)"
                    $segmentSizes = @{}

                    # Create temp directory
                    New-Item -ItemType Directory -Path $tempDir -Force | Out-Null

                    # Script block run in each background job: downloads one byte range
                    # into its own temp file, emitting progress beacons as it goes.
                    $downloadSegment = {
                        param(
                            [string]$url,
                            [string]$tempFile,
                            [long]$start,
                            [long]$end,
                            [int]$bufferSize,   # passed in as 64KB from the caller
                            [int]$timeout,
                            [string]$userAgent
                        )
                        $request = [System.Net.HttpWebRequest]::Create($url)
                        $request.Method = "GET"
                        $request.AddRange($start, $end)
                        $request.Timeout = $timeout * 1000
                        $request.ReadWriteTimeout = $timeout * 1000
                        $request.UserAgent = $userAgent
                        try {
                            $response = $request.GetResponse()
                            $stream = $response.GetResponseStream()
                            $fileStream = [System.IO.File]::Create($tempFile)
                            [byte[]]$buffer = New-Object byte[] $bufferSize
                            [long]$totalBytes = 0
                            [long]$expectedLength = $end - $start + 1
                            try {
                                while ($totalBytes -lt $expectedLength) {
                                    [long]$remaining = $expectedLength - $totalBytes
                                    # Never read more than the buffer or past segment end.
                                    [int]$toRead = [Math]::Min([Math]::Min($buffer.Length, $remaining), [int]::MaxValue)
                                    if ($toRead -le 0) { break }
                                    [int]$bytesRead = $stream.Read($buffer, 0, $toRead)
                                    if ($bytesRead -eq 0) { break }
                                    $fileStream.Write($buffer, 0, $bytesRead)
                                    $totalBytes += $bytesRead
                                    # Progress beacon consumed by the monitor loop.
                                    Write-Output @{ BytesRead = $totalBytes }
                                }
                            }
                            finally {
                                $fileStream.Flush()
                            }
                        }
                        finally {
                            if ($fileStream) { $fileStream.Close() }
                            if ($stream) { $stream.Close() }
                            if ($response) { $response.Close() }
                        }
                    }

                    # Fixed-size slots: one job and one temp file per segment.
                    $jobs = New-Object object[] $Threads
                    $tempFiles = New-Object string[] $Threads

                    # Pre-compute every segment's byte range; the last segment absorbs
                    # the remainder when the length does not divide evenly.
                    for ($i = 0; $i -lt $Threads; $i++) {
                        $start = $i * $segmentSize
                        $end = if ($i -eq ($Threads - 1)) {
                            $contentLength - 1
                        }
                        else {
                            ($i + 1) * $segmentSize - 1
                        }
                        $segmentSizes[$i] = $end - $start + 1
                        $tempFiles[$i] = Join-Path $tempDir "segment_$i"
                    }

                    # Launch one download job per segment.
                    for ($i = 0; $i -lt $Threads; $i++) {
                        $start = $i * $segmentSize
                        $end = if ($i -eq ($Threads - 1)) {
                            $contentLength - 1
                        }
                        else {
                            ($i + 1) * $segmentSize - 1
                        }
                        Write-Verbose "Starting segment ${i}: ${start} - ${end} (Size: $($segmentSizes[$i]) bytes)"
                        $jobs[$i] = Start-Job -ScriptBlock $downloadSegment -ArgumentList $Url, $tempFiles[$i], $start, $end, $BUFFER_SIZE, $Timeout, $UserAgent
                    }

                    $lastUpdate = 0                      # bytes seen at last progress tick
                    $completedSegments = @{}
                    $lastProgressTime = [DateTime]::Now
                    $segmentLastProgress = @{}
                    $segmentRetries = @{}
                    $maxSegmentRetries = 5

                    # Kill and relaunch a single segment's job (bounded retries).
                    # Relies on dynamic scoping to reach $jobs/$segmentSize/etc.
                    function Restart-Segment {
                        param(
                            [int]$segmentIndex,
                            [string]$reason
                        )
                        if (-not $segmentRetries.ContainsKey($segmentIndex)) {
                            $segmentRetries[$segmentIndex] = 0
                        }
                        $segmentRetries[$segmentIndex]++
                        if ($segmentRetries[$segmentIndex] -gt $maxSegmentRetries) {
                            Write-Warning "Segment $segmentIndex failed after $maxSegmentRetries retries: $reason"
                            throw "Download failed - segment $segmentIndex max retries exceeded"
                        }
                        Write-Verbose "Restarting segment $segmentIndex (attempt $($segmentRetries[$segmentIndex]) of $maxSegmentRetries): $reason"
                        # Tear down the old job before starting a replacement.
                        $oldJob = $jobs[$segmentIndex]
                        if ($oldJob) {
                            try {
                                if ($oldJob.State -ne 'Completed') {
                                    $oldJob | Stop-Job -ErrorAction SilentlyContinue
                                }
                                $oldJob | Remove-Job -Force -ErrorAction SilentlyContinue
                            }
                            catch {
                                Write-Warning "Failed to cleanup old job for segment $($segmentIndex): $_"
                            }
                        }
                        # Recompute this segment's byte range and relaunch it.
                        $start = $segmentIndex * $segmentSize
                        $end = [Math]::Min(($segmentIndex + 1) * $segmentSize - 1, $contentLength - 1)
                        $tempFile = Join-Path $tempDir "segment_$segmentIndex"
                        $jobs[$segmentIndex] = Start-Job -ScriptBlock $downloadSegment -ArgumentList $Url, $tempFile, $start, $end, $BUFFER_SIZE, $Timeout, $UserAgent
                        # Reset progress tracking for the fresh job.
                        $segmentLastProgress[$segmentIndex] = @{
                            LastTime   = [DateTime]::Now
                            LastBytes  = 0
                            StuckCount = 0
                        }
                    }

                    # Monitor loop: poll job output, restart lost segments, report progress.
                    while ($true) {
                        $totalBytesRead = 0
                        $allComplete = $true
                        $currentTime = [DateTime]::Now

                        # Abort the attempt if NO segment has advanced within the timeout.
                        if (($currentTime - $lastProgressTime).TotalSeconds -gt $Timeout) {
                            throw "Download timed out - no progress for $Timeout seconds"
                        }

                        for ($i = 0; $i -lt $Threads; $i++) {
                            if ($completedSegments[$i]) {
                                $totalBytesRead += $segmentSizes[$i]
                                continue
                            }
                            $job = $jobs[$i]
                            if (-not $job) {
                                Restart-Segment -segmentIndex $i -reason "Job was lost"
                                $allComplete = $false
                                continue
                            }
                            try {
                                # Drain output; keep it while the job is still running so
                                # later polls can see the latest progress beacon.
                                $data = $null
                                if ($job.State -eq 'Completed') {
                                    $data = Receive-Job -Job $job -ErrorAction Stop
                                }
                                else {
                                    $data = Receive-Job -Job $job -Keep -ErrorAction Stop
                                }
                                if ($data -and $data.Count -gt 0) {
                                    $lastBytes = ($data | Select-Object -Last 1).BytesRead
                                    if ($lastBytes -ge $segmentSizes[$i]) {
                                        # Segment finished: record it and retire the job.
                                        $completedSegments[$i] = $true
                                        $totalBytesRead += $segmentSizes[$i]
                                        $segmentLastProgress.Remove($i)
                                        if ($job.State -ne 'Completed') {
                                            $job | Stop-Job -ErrorAction SilentlyContinue
                                        }
                                        $job | Remove-Job -Force -ErrorAction SilentlyContinue
                                        $jobs[$i] = $null
                                    }
                                    else {
                                        $allComplete = $false
                                        $totalBytesRead += $lastBytes
                                        # Track per-segment progress for staleness detection.
                                        if (-not $segmentLastProgress.ContainsKey($i)) {
                                            $segmentLastProgress[$i] = @{
                                                LastTime   = $currentTime
                                                LastBytes  = $lastBytes
                                                StuckCount = 0
                                            }
                                        }
                                        elseif ($lastBytes -gt $segmentLastProgress[$i].LastBytes) {
                                            $segmentLastProgress[$i].LastTime = $currentTime
                                            $segmentLastProgress[$i].LastBytes = $lastBytes
                                            $segmentLastProgress[$i].StuckCount = 0
                                            $lastProgressTime = $currentTime
                                        }
                                    }
                                }
                                else {
                                    $allComplete = $false
                                }
                            }
                            catch {
                                if ($_.Exception.Message -match "Destination array was not long enough") {
                                    # Transient partial-read artifact from Receive-Job;
                                    # ignore and retry on the next poll.
                                    $allComplete = $false
                                    continue
                                }
                                Restart-Segment -segmentIndex $i -reason "Error: $_"
                                $allComplete = $false
                                continue
                            }
                        }

                        if ($allComplete) { break }

                        if (-not $NoProgress) {
                            $progress = [Math]::Min(($totalBytesRead / $contentLength) * 100, 100)
                            # Poll interval is 500 ms, hence the 0.5 s divisor.
                            $speed = ($totalBytesRead - $lastUpdate) / 0.5
                            $lastUpdate = $totalBytesRead
                            $downloadedSize = Format-FileSize -Size $totalBytesRead
                            Write-Progress -Activity "Downloading File: $fileName ($downloadedSize of $totalSize)" `
                                -Status "$([math]::Round($progress, 2))% Complete - $([math]::Round($speed / $MB, 2)) MB/s" `
                                -PercentComplete $progress
                        }
                        Start-Sleep -Milliseconds 500
                    }

                    # Merge the segments in order, verifying each one's size.
                    $finalFile = [System.IO.File]::Create($OutFile)
                    try {
                        for ($i = 0; $i -lt $Threads; $i++) {
                            if (-not $NoProgress) {
                                Write-Progress -Activity "Merging segments: $OutFile" `
                                    -Status "Processing segment $($i + 1) of $Threads" `
                                    -PercentComplete (($i / $Threads) * 100)
                            }
                            $tempFile = Join-Path $tempDir "segment_$i"
                            $expectedSize = $segmentSizes[$i]
                            if (-not (Test-Path $tempFile)) {
                                throw "Missing segment file: $tempFile"
                            }
                            $bytes = [System.IO.File]::ReadAllBytes($tempFile)
                            if ($bytes.Length -eq 0) {
                                throw "Empty segment file: $tempFile"
                            }
                            if ($bytes.Length -ne $expectedSize) {
                                throw "Segment size mismatch: Expected $expectedSize, got $($bytes.Length)"
                            }
                            $finalFile.Write($bytes, 0, $bytes.Length)
                        }
                    }
                    finally {
                        $finalFile.Close()
                        Remove-Item -Path $tempDir -Recurse -Force -ErrorAction SilentlyContinue
                    }
                }

                # Finish progress reporting and timing for BOTH paths (previously the
                # simple path never completed the bar or stopped the timer).
                if (-not $NoProgress) {
                    Write-Progress -Activity "Downloading File" -Completed
                }
                $downloadTimer.Stop()

                # Compute the hash when verification was requested, or for the
                # verbose summary below.
                if ($ExpectedHash -or ($PSCmdlet.MyInvocation.BoundParameters["Verbose"].IsPresent)) {
                    if ($HashType -eq "CRC32") {
                        $allBytes = [System.IO.File]::ReadAllBytes($OutFile)
                        $crc32 = [Win32Api]::RtlComputeCrc32(0, $allBytes, $allBytes.Length)
                        $actualHash = $crc32.ToString("X8")
                    }
                    else {
                        $actualHash = (Get-FileHash -Path $OutFile -Algorithm $HashType).Hash
                    }
                }

                if ($ExpectedHash) {
                    Write-Verbose "Verifying file hash..."
                    if ($actualHash -ne $ExpectedHash) {
                        Write-Warning "Hash verification failed! Expected: $ExpectedHash, Got: $actualHash"
                        # Delete the corrupt file, otherwise the retry attempt is
                        # skipped by the existing-file check (unless -Force was given).
                        Remove-Item -Path $OutFile -Force -ErrorAction SilentlyContinue
                        $success = $false
                        continue
                    }
                    Write-Verbose "Hash verification successful"
                }

                if ($PSCmdlet.MyInvocation.BoundParameters["Verbose"].IsPresent) {
                    Write-Output "`nFile downloaded successfully."
                    $elapsed = $downloadTimer.Elapsed
                    $formattedTime = if ($elapsed.Hours -gt 0) {
                        "{0} hour{1} {2} minute{3} {4:N3} seconds" -f $elapsed.Hours,
                            $(if ($elapsed.Hours -eq 1) { "" } else { "s" }),
                            $elapsed.Minutes,
                            $(if ($elapsed.Minutes -eq 1) { "" } else { "s" }),
                            ($elapsed.Seconds + $elapsed.Milliseconds / 1000)
                    }
                    elseif ($elapsed.Minutes -gt 0) {
                        "{0} minute{1} {2:N3} seconds" -f $elapsed.Minutes,
                            $(if ($elapsed.Minutes -eq 1) { "" } else { "s" }),
                            ($elapsed.Seconds + $elapsed.Milliseconds / 1000)
                    }
                    else {
                        "{0:N3} seconds" -f ($elapsed.Seconds + $elapsed.Milliseconds / 1000)
                    }
                    Write-Output "Path: $OutFile"
                    Write-Output "Size: $(Format-FileSize -Size ((Get-Item $OutFile).Length))"
                    Write-Output "Elapsed Time: $formattedTime"
                    Write-Output "$($HashType): $actualHash`n"
                }
                else {
                    if (-not $Quiet) {
                        Write-Output "File downloaded successfully."
                    }
                }

                $success = $true
            }
            catch {
                if ($attempt -ge $MaxRetry) {
                    Write-Error "Failed after $MaxRetry attempts: $_"
                    throw
                }
                Write-Warning "Download failed (attempt $attempt of $MaxRetry): $_"
                # Guard both paths: either variable may be unset when the failure
                # happened before it was assigned this attempt.
                if ($tempDir -and (Test-Path $tempDir)) {
                    Remove-Item -Path $tempDir -Recurse -Force -ErrorAction SilentlyContinue
                }
                if ($OutFile -and (Test-Path $OutFile)) {
                    Remove-Item -Path $OutFile -Force -ErrorAction SilentlyContinue
                }
            }
            finally {
                # Stop and remove any background jobs that are still around, then
                # clear the temp directory once (not once per job as before).
                $jobs | Where-Object { $_ } | ForEach-Object {
                    try {
                        if ($_.State -ne 'Completed') {
                            $_ | Stop-Job -ErrorAction SilentlyContinue
                        }
                        $_ | Remove-Job -Force -ErrorAction SilentlyContinue
                    }
                    catch {
                        Write-Warning "Failed to cleanup job: $_"
                    }
                }
                if ($tempDir -and (Test-Path $tempDir)) {
                    Remove-Item -Path $tempDir -Recurse -Force -ErrorAction SilentlyContinue
                }
                $jobs = @()
            }
        }
    }
}
#Start-Download http://downloads.dell.com/FOLDER12172494M/1/Precision-7680-8J4H2_Win11_1.0_A07.exe -Threads 10 -Force -ExpectedHash "30C763D5CE8A4DCCF8E18E66C3EA2555" -Destination "C:\Downloads\Precision-7680-8J4H2_Win11_1.0_A07.exe" -TempPath "C:\Downloads\Temp" -Verbose