2025-10-13 12:39:46 +02:00
parent be8555316f
commit f7b83e14a5
4 changed files with 385 additions and 32 deletions

View File

@@ -12,12 +12,30 @@
## Support: support@cqre.net ##
##################################################
-# KHDB Settings
-###############
# Storage Settings
##################
# Select storage provider: Azure or S3 (S3 = S3-compatible like IDrive e2)
# Default is Azure when not set.
StorageProvider = Azure
# Azure (if StorageProvider=Azure)
storageAccountName =
containerName =
sasToken =
# S3-compatible (if StorageProvider=S3)
# Example for IDrive e2: set endpoint URL to the region endpoint you were given.
# Access key/secret correspond to your S3-compatible credentials.
s3EndpointUrl =
s3Region = us-east-1
s3BucketName =
s3AccessKeyId =
s3SecretAccessKey =
# Many S3-compatible providers require path-style addressing
# (true recommended for MinIO/IDrive e2/Wasabi). Set to true/false.
s3ForcePathStyle = true
s3UseAwsTools = false
# Application Settings
######################
InstallationPath=
@@ -26,7 +44,9 @@ WeakPasswordsDatabase=khdb.txt
# CheckOnlyEnabledUsers=true
# Notes:
-# - Required PowerShell modules: DSInternals, ActiveDirectory, Az.Storage (for upload).
# - Required PowerShell modules: DSInternals, ActiveDirectory
# For Azure uploads: Az.Storage
# For S3-compatible uploads: AWS.Tools.S3 or AWSPowerShell.NetCore
# - AD account permissions: Replication Directory Changes and Replication Directory Changes All
# on the domain (DCSync-equivalent) are sufficient; full Domain Admin not required.
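#
# Example (illustrative placeholders only, not part of this commit): a filled-in S3-compatible
# section for an IDrive e2 / MinIO style endpoint could look like the following.
# StorageProvider = S3
# s3EndpointUrl = https://s3.your-region.example-provider.com
# s3Region = us-east-1
# s3BucketName = elysium-khdb
# s3AccessKeyId = <your access key>
# s3SecretAccessKey = <your secret key>
# s3ForcePathStyle = true
# s3UseAwsTools = false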

View File

@@ -12,7 +12,7 @@
##################################################
<#
-#Requires -Modules DSInternals, Az.Storage
#Requires -Modules DSInternals
.SYNOPSIS
Script for extracting NTLM hashes from live AD for further analysis.
@@ -66,11 +66,164 @@ function Normalize-ReportPath([string]$p) {
return (Join-Path -Path $scriptRoot -ChildPath $p)
}
-# External settings
# Storage provider selection (Azure by default)
$storageProvider = $ElysiumSettings['StorageProvider']
if ([string]::IsNullOrWhiteSpace($storageProvider)) { $storageProvider = 'Azure' }
# Azure settings
$storageAccountName = $ElysiumSettings['storageAccountName']
$containerName = $ElysiumSettings['containerName']
$sasToken = $ElysiumSettings['sasToken']
# S3-compatible settings
$s3EndpointUrl = $ElysiumSettings['s3EndpointUrl']
$s3Region = $ElysiumSettings['s3Region']
$s3BucketName = $ElysiumSettings['s3BucketName']
$s3AccessKeyId = $ElysiumSettings['s3AccessKeyId']
$s3SecretAccessKey = $ElysiumSettings['s3SecretAccessKey']
$s3ForcePathStyle = $ElysiumSettings['s3ForcePathStyle']
$s3UseAwsTools = $ElysiumSettings['s3UseAwsTools']
if ([string]::IsNullOrWhiteSpace($s3Region)) { $s3Region = 'us-east-1' }
try { $s3ForcePathStyle = [System.Convert]::ToBoolean($s3ForcePathStyle) } catch { $s3ForcePathStyle = $true }
try { $s3UseAwsTools = [System.Convert]::ToBoolean($s3UseAwsTools) } catch { $s3UseAwsTools = $false }
function Ensure-AWSS3Module {
# Ensure AWS SDK types are available via AWS Tools for PowerShell
try {
$null = [Amazon.S3.AmazonS3Client]
return
} catch {
try { Import-Module -Name AWS.Tools.S3 -ErrorAction Stop; return } catch {}
try { Import-Module -Name AWSPowerShell.NetCore -ErrorAction Stop; return } catch {}
throw "AWS Tools for PowerShell not found. Install with: Install-Module AWS.Tools.S3 -Scope CurrentUser"
}
}
function New-S3Client {
param(
[string]$EndpointUrl,
[string]$Region,
[string]$AccessKeyId,
[string]$SecretAccessKey,
[bool]$ForcePathStyle = $true
)
Ensure-AWSS3Module
$creds = New-Object Amazon.Runtime.BasicAWSCredentials($AccessKeyId, $SecretAccessKey)
$cfg = New-Object Amazon.S3.AmazonS3Config
if ($EndpointUrl) { $cfg.ServiceURL = $EndpointUrl }
if ($Region) {
try { $cfg.RegionEndpoint = [Amazon.RegionEndpoint]::GetBySystemName($Region) } catch {}
}
$cfg.ForcePathStyle = [bool]$ForcePathStyle
return (New-Object Amazon.S3.AmazonS3Client($creds, $cfg))
}
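# Illustrative usage (commented out, not part of this commit): the client returned above can be
# exercised with the same synchronous SDK surface this script already relies on, e.g. to smoke-test
# endpoint, credentials and bucket before a real upload, using the settings values read above.
#   $testClient = New-S3Client -EndpointUrl $s3EndpointUrl -Region $s3Region -AccessKeyId $s3AccessKeyId -SecretAccessKey $s3SecretAccessKey -ForcePathStyle:$s3ForcePathStyle
#   $listReq = New-Object Amazon.S3.Model.ListObjectsV2Request -Property @{ BucketName = $s3BucketName; MaxKeys = 5 }
#   ($testClient.ListObjectsV2($listReq)).S3Objects | ForEach-Object { Write-Host ("{0}  {1:N0} bytes" -f $_.Key, $_.Size) }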
# Native S3 SigV4 (no AWS Tools) helpers
function Get-Bytes([string]$s) { return [System.Text.Encoding]::UTF8.GetBytes($s) }
function Get-HashHex([byte[]]$bytes) {
$sha = [System.Security.Cryptography.SHA256]::Create()
try { return ([BitConverter]::ToString($sha.ComputeHash($bytes))).Replace('-', '').ToLowerInvariant() } finally { $sha.Dispose() }
}
function Get-FileSha256Hex([string]$path) {
$sha = [System.Security.Cryptography.SHA256]::Create()
$fs = [System.IO.File]::OpenRead($path)
try { return ([BitConverter]::ToString($sha.ComputeHash($fs))).Replace('-', '').ToLowerInvariant() } finally { $fs.Close(); $sha.Dispose() }
}
function HmacSha256([byte[]]$key, [string]$data) {
$h = [System.Security.Cryptography.HMACSHA256]::new($key)
try { return $h.ComputeHash((Get-Bytes $data)) } finally { $h.Dispose() }
}
function GetSignatureKey([string]$secret, [string]$dateStamp, [string]$regionName, [string]$serviceName) {
$kDate = HmacSha256 (Get-Bytes ('AWS4' + $secret)) $dateStamp
$kRegion = HmacSha256 $kDate $regionName
$kService = HmacSha256 $kRegion $serviceName
return (HmacSha256 $kService 'aws4_request')
}
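# Illustrative self-check (commented out, not part of this commit): GetSignatureKey chains
# HMAC-SHA256 over date -> region -> service -> "aws4_request", seeded with "AWS4" + secret,
# as required by SigV4. Run interactively with a placeholder secret to inspect the derived key:
#   $exampleKey = GetSignatureKey 'EXAMPLE-SECRET' ((Get-Date).ToUniversalTime().ToString('yyyyMMdd')) 'us-east-1' 's3'
#   ([BitConverter]::ToString($exampleKey)) -replace '-', ''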
function UriEncode([string]$data, [bool]$encodeSlash) {
if ($null -eq $data) { return '' }
$enc = [System.Uri]::EscapeDataString($data)
if (-not $encodeSlash) { $enc = $enc -replace '%2F','/' }
return $enc
}
function BuildCanonicalPath([System.Uri]$uri) {
$segments = $uri.AbsolutePath.Split('/')
$encoded = @()
foreach ($seg in $segments) { $encoded += (UriEncode $seg $false) }
$path = ($encoded -join '/')
if (-not $path.StartsWith('/')) { $path = '/' + $path }
return $path
}
function ToHex([byte[]]$bytes) { return ([BitConverter]::ToString($bytes)).Replace('-', '').ToLowerInvariant() }
function BuildAuthHeaders($method, [System.Uri]$uri, [string]$region, [string]$accessKey, [string]$secretKey, [string]$payloadHash) {
$algorithm = 'AWS4-HMAC-SHA256'
$amzdate = (Get-Date).ToUniversalTime().ToString('yyyyMMddTHHmmssZ')
$datestamp = (Get-Date).ToUniversalTime().ToString('yyyyMMdd')
$hostHeader = $uri.Host
if (-not $uri.IsDefaultPort) { $hostHeader = "$hostHeader:$($uri.Port)" }
$canonicalUri = BuildCanonicalPath $uri
$canonicalQueryString = ''
$canonicalHeaders = "host:$hostHeader`n" + "x-amz-content-sha256:$payloadHash`n" + "x-amz-date:$amzdate`n"
$signedHeaders = 'host;x-amz-content-sha256;x-amz-date'
$canonicalRequest = "$method`n$canonicalUri`n$canonicalQueryString`n$canonicalHeaders`n$signedHeaders`n$payloadHash"
$credentialScope = "$datestamp/$region/s3/aws4_request"
$stringToSign = "$algorithm`n$amzdate`n$credentialScope`n$((Get-HashHex (Get-Bytes $canonicalRequest)))"
$signingKey = GetSignatureKey $secretKey $datestamp $region 's3'
$signature = ToHex (HmacSha256 $signingKey $stringToSign)
$authHeader = "$algorithm Credential=$accessKey/$credentialScope, SignedHeaders=$signedHeaders, Signature=$signature"
return @{ 'x-amz-date' = $amzdate; 'x-amz-content-sha256' = $payloadHash; 'Authorization' = $authHeader }
}
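# Illustrative usage (commented out, placeholder endpoint and credentials, not part of this commit):
# build SigV4 headers for a GET with an empty payload and inspect them; the empty-body hash is
# computed the same way the download path further below does it.
#   $exampleUri  = [System.Uri]'https://s3.us-east-1.example.com/elysium/khdb.txt.zip'
#   $exampleHdrs = BuildAuthHeaders -method 'GET' -uri $exampleUri -region 'us-east-1' -accessKey 'AKIAEXAMPLE' -secretKey 'EXAMPLE-SECRET' -payloadHash (Get-HashHex (Get-Bytes ''))
#   $exampleHdrs.GetEnumerator() | ForEach-Object { Write-Host ("{0}: {1}" -f $_.Key, $_.Value) }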
function BuildS3Uri([string]$endpointUrl, [string]$bucket, [string]$key, [bool]$forcePathStyle) {
$base = [System.Uri]$endpointUrl
$ub = [System.UriBuilder]::new($base)
if ($forcePathStyle) {
$p = ($ub.Path.TrimEnd('/'))
if ([string]::IsNullOrEmpty($p)) { $p = '/' }
$ub.Path = ($p.TrimEnd('/') + '/' + $bucket + '/' + $key)
} else {
$ub.Host = "$bucket." + $ub.Host
$p = $ub.Path.TrimEnd('/')
if ([string]::IsNullOrEmpty($p)) { $p = '/' }
$ub.Path = ($p.TrimEnd('/') + '/' + $key)
}
return $ub.Uri
}
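# Illustrative output (placeholder endpoint, not part of this commit): the two addressing styles
# BuildS3Uri produces for the same bucket and key.
#   (BuildS3Uri -endpointUrl 'https://s3.us-east-1.example.com' -bucket 'elysium' -key 'khdb.txt.zip' -forcePathStyle $true).AbsoluteUri
#     -> https://s3.us-east-1.example.com/elysium/khdb.txt.zip   (path-style; typical for MinIO/IDrive e2)
#   (BuildS3Uri -endpointUrl 'https://s3.us-east-1.example.com' -bucket 'elysium' -key 'khdb.txt.zip' -forcePathStyle $false).AbsoluteUri
#     -> https://elysium.s3.us-east-1.example.com/khdb.txt.zip   (virtual-hosted style)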
function Invoke-S3PutFile([string]$endpointUrl, [string]$bucket, [string]$key, [string]$filePath, [string]$region, [string]$ak, [string]$sk, [bool]$forcePathStyle) {
$uri = BuildS3Uri -endpointUrl $endpointUrl -bucket $bucket -key $key -forcePathStyle $forcePathStyle
$payloadHash = Get-FileSha256Hex -path $filePath
Add-Type -AssemblyName System.Net.Http -ErrorAction SilentlyContinue
$client = [System.Net.Http.HttpClient]::new()
try {
$req = [System.Net.Http.HttpRequestMessage]::new([System.Net.Http.HttpMethod]::Put, $uri)
$stream = [System.IO.File]::OpenRead($filePath)
$req.Content = [System.Net.Http.StreamContent]::new($stream)
$hdrs = BuildAuthHeaders -method 'PUT' -uri $uri -region $region -accessKey $ak -secretKey $sk -payloadHash $payloadHash
$req.Headers.TryAddWithoutValidation('x-amz-date', $hdrs['x-amz-date']) | Out-Null
$req.Headers.TryAddWithoutValidation('Authorization', $hdrs['Authorization']) | Out-Null
$req.Headers.TryAddWithoutValidation('x-amz-content-sha256', $hdrs['x-amz-content-sha256']) | Out-Null
$resp = $client.SendAsync($req).Result
if (-not $resp.IsSuccessStatusCode) { throw "S3 PUT failed: $([int]$resp.StatusCode) $($resp.ReasonPhrase)" }
} finally { if ($req) { $req.Dispose() }; if ($stream) { $stream.Close(); $stream.Dispose() }; $client.Dispose() }
}
function Invoke-S3GetToFile([string]$endpointUrl, [string]$bucket, [string]$key, [string]$targetPath, [string]$region, [string]$ak, [string]$sk, [bool]$forcePathStyle) {
$uri = BuildS3Uri -endpointUrl $endpointUrl -bucket $bucket -key $key -forcePathStyle $forcePathStyle
$payloadHash = (Get-HashHex (Get-Bytes ''))
Add-Type -AssemblyName System.Net.Http -ErrorAction SilentlyContinue
$client = [System.Net.Http.HttpClient]::new()
try {
$req = [System.Net.Http.HttpRequestMessage]::new([System.Net.Http.HttpMethod]::Get, $uri)
$hdrs = BuildAuthHeaders -method 'GET' -uri $uri -region $region -accessKey $ak -secretKey $sk -payloadHash $payloadHash
$req.Headers.TryAddWithoutValidation('x-amz-date', $hdrs['x-amz-date']) | Out-Null
$req.Headers.TryAddWithoutValidation('Authorization', $hdrs['Authorization']) | Out-Null
$req.Headers.TryAddWithoutValidation('x-amz-content-sha256', $hdrs['x-amz-content-sha256']) | Out-Null
$resp = $client.SendAsync($req).Result
if (-not $resp.IsSuccessStatusCode) { throw "S3 GET failed: $([int]$resp.StatusCode) $($resp.ReasonPhrase)" }
$bytes = $resp.Content.ReadAsByteArrayAsync().Result
[System.IO.File]::WriteAllBytes($targetPath, $bytes)
} finally { if ($req) { $req.Dispose() }; $client.Dispose() }
}
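# Illustrative round trip (commented out, not part of this commit): upload a file with the native
# SigV4 helpers, download it back and compare hashes, mirroring what the main flow further below
# does with the encrypted export. File path and key are placeholders.
#   Invoke-S3PutFile  -endpointUrl $s3EndpointUrl -bucket $s3BucketName -key 'demo.bin' -filePath 'C:\Temp\demo.bin' -region $s3Region -ak $s3AccessKeyId -sk $s3SecretAccessKey -forcePathStyle $s3ForcePathStyle
#   Invoke-S3GetToFile -endpointUrl $s3EndpointUrl -bucket $s3BucketName -key 'demo.bin' -targetPath 'C:\Temp\demo.bin.check' -region $s3Region -ak $s3AccessKeyId -sk $s3SecretAccessKey -forcePathStyle $s3ForcePathStyle
#   (Get-FileSha256Hex 'C:\Temp\demo.bin') -eq (Get-FileSha256Hex 'C:\Temp\demo.bin.check')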
# Retrieve the passphrase from a user environment variable
$passphrase = [System.Environment]::GetEnvironmentVariable("ELYSIUM_PASSPHRASE", [System.EnvironmentVariableTarget]::User)
if ([string]::IsNullOrWhiteSpace($passphrase)) { Write-Error 'Passphrase not found in ELYSIUM_PASSPHRASE environment variable.'; exit }
@@ -206,33 +359,75 @@ Write-Host "File has been encrypted: $encryptedFilePath"
# Calculate the local file checksum
$localFileChecksum = Get-FileChecksum -Path $encryptedFilePath
-# Create the context for Azure Blob Storage with SAS token
-$sas = $sasToken
-if ([string]::IsNullOrWhiteSpace($sas)) { Write-Error 'sasToken is missing in settings.'; exit }
-$sas = $sas.Trim(); if (-not $sas.StartsWith('?')) { $sas = '?' + $sas }
-$storageContext = New-AzStorageContext -StorageAccountName $storageAccountName -SasToken $sas
-# Ensure container exists
-$container = Get-AzStorageContainer -Name $containerName -Context $storageContext -ErrorAction SilentlyContinue
-if (-not $container) { Write-Error "Azure container '$containerName' not found or access denied."; exit }
if ($storageProvider -ieq 'S3') {
# S3-compatible path (e.g., IDrive e2) without requiring AWS Tools
if ([string]::IsNullOrWhiteSpace($s3BucketName)) { Write-Error 's3BucketName is missing in settings.'; exit }
if ([string]::IsNullOrWhiteSpace($s3AccessKeyId) -or [string]::IsNullOrWhiteSpace($s3SecretAccessKey)) { Write-Error 's3AccessKeyId / s3SecretAccessKey missing in settings.'; exit }
if ([string]::IsNullOrWhiteSpace($s3EndpointUrl)) { Write-Error 's3EndpointUrl is required for S3-compatible storage.'; exit }
$usedAwsTools = $false
if ($s3UseAwsTools) {
try {
$s3Client = New-S3Client -EndpointUrl $s3EndpointUrl -Region $s3Region -AccessKeyId $s3AccessKeyId -SecretAccessKey $s3SecretAccessKey -ForcePathStyle:$s3ForcePathStyle
# Upload
$putReq = New-Object Amazon.S3.Model.PutObjectRequest -Property @{ BucketName = $s3BucketName; Key = $blobName; FilePath = $encryptedFilePath }
$null = $s3Client.PutObject($putReq)
Write-Host "Encrypted file uploaded to S3-compatible bucket (AWS Tools): $blobName"
$tempDownloadPath = [System.IO.Path]::GetTempFileName()
$getReq = New-Object Amazon.S3.Model.GetObjectRequest -Property @{ BucketName = $s3BucketName; Key = $blobName }
$getResp = $s3Client.GetObject($getReq)
$getResp.WriteResponseStreamToFile($tempDownloadPath, $true)
$getResp.Dispose()
$downloadedFileChecksum = Get-FileChecksum -Path $tempDownloadPath
$usedAwsTools = $true
} catch {
Write-Warning "AWS Tools path failed or not available. Falling back to native HTTP (SigV4). Details: $($_.Exception.Message)"
$usedAwsTools = $false
}
}
-# Upload the encrypted file to Azure Blob Storage
-Set-AzStorageBlobContent -File $encryptedFilePath -Container $containerName -Blob $blobName -Context $storageContext | Out-Null
-Write-Host "Encrypted file uploaded to Azure Blob Storage: $blobName"
if (-not $usedAwsTools) {
Invoke-S3PutFile -endpointUrl $s3EndpointUrl -bucket $s3BucketName -key $blobName -filePath $encryptedFilePath -region $s3Region -ak $s3AccessKeyId -sk $s3SecretAccessKey -forcePathStyle:$s3ForcePathStyle
Write-Host "Encrypted file uploaded to S3-compatible bucket: $blobName"
$tempDownloadPath = [System.IO.Path]::GetTempFileName()
Invoke-S3GetToFile -endpointUrl $s3EndpointUrl -bucket $s3BucketName -key $blobName -targetPath $tempDownloadPath -region $s3Region -ak $s3AccessKeyId -sk $s3SecretAccessKey -forcePathStyle:$s3ForcePathStyle
$downloadedFileChecksum = Get-FileChecksum -Path $tempDownloadPath
}
}
else {
# Azure Blob Storage path (default)
$sas = $sasToken
if ([string]::IsNullOrWhiteSpace($sas)) { Write-Error 'sasToken is missing in settings.'; exit }
$sas = $sas.Trim(); if (-not $sas.StartsWith('?')) { $sas = '?' + $sas }
try { Import-Module Az.Storage -ErrorAction Stop } catch {}
$storageContext = New-AzStorageContext -StorageAccountName $storageAccountName -SasToken $sas
-# Download the blob to a temporary location to verify
-$tempDownloadPath = [System.IO.Path]::GetTempFileName()
-Get-AzStorageBlobContent -Blob $blobName -Container $containerName -Context $storageContext -Destination $tempDownloadPath -Force | Out-Null
-# Calculate the downloaded file checksum
-$downloadedFileChecksum = Get-FileChecksum -Path $tempDownloadPath
# Ensure container exists
$container = Get-AzStorageContainer -Name $containerName -Context $storageContext -ErrorAction SilentlyContinue
if (-not $container) { Write-Error "Azure container '$containerName' not found or access denied."; exit }
# Upload the encrypted file to Azure Blob Storage
Set-AzStorageBlobContent -File $encryptedFilePath -Container $containerName -Blob $blobName -Context $storageContext | Out-Null
Write-Host "Encrypted file uploaded to Azure Blob Storage: $blobName"
# Download the blob to a temporary location to verify
$tempDownloadPath = [System.IO.Path]::GetTempFileName()
Get-AzStorageBlobContent -Blob $blobName -Container $containerName -Context $storageContext -Destination $tempDownloadPath -Force | Out-Null
# Calculate the downloaded file checksum
$downloadedFileChecksum = Get-FileChecksum -Path $tempDownloadPath
}
# Compare the checksums
if ($localFileChecksum -eq $downloadedFileChecksum) {
Write-Host "The file was correctly uploaded. Checksum verified."
# Clean up local and temporary files only on success
Remove-Item -Path $exportPath, $compressedFilePath, $encryptedFilePath, $tempDownloadPath -Force
-Write-Host "Local and temporary files cleaned up after uploading to Azure Blob Storage."
if ($storageProvider -ieq 'S3') {
Write-Host "Local and temporary files cleaned up after uploading to S3-compatible storage."
} else {
Write-Host "Local and temporary files cleaned up after uploading to Azure Blob Storage."
}
}
else {
Write-Warning "Checksum verification failed. Keeping local artifacts for investigation: $exportPath, $compressedFilePath, $encryptedFilePath"

View File

@@ -22,14 +22,18 @@ During first run, the tool will ask for passphrase that will be used to encrypt/
After installation, edit ElysiumSettings.txt, check all variables and add domains to test.
### Update Known-Hashes Database (KHDB)
Run script Elysium.ps1 as an administrator and choose option 1 (Update Known-Hashes Database).
-The script will then download the database from dedicated Azure Storage, decompresses it and updates the current database.
The script downloads the database from the configured storage (Azure Blob or S3-compatible), decompresses it, and updates the current database.
### Test Weak AD passwords
Run script Elysium.ps1 as an administrator and choose option 2 (Test Weak AD Passwords).
The script will then ask for the domain to be tested and, upon choice, will ask for the domain administrator password. The DA username is already provided in the script for each domain.
The tool then connects to the Domain Controller and tests all enabled users in the domain against the KHDB. A PDF report with the findings is then generated.
### Send current hashes to update the KHDB
Run script Elysium.ps1 as an administrator and choose option 3 (Extract and Send Hashes).
-The tool will then ask for domain and password of domain administrator. With correct credentials, the tool will then extract current hashes (no history) of non-disabled users, compresses and encrypts them and sends them to the tool provider.
The tool will then ask for the domain and the domain administrator password. With correct credentials, the tool extracts the current hashes (no history) of non-disabled users, compresses and encrypts them, and uploads them to the configured storage (Azure Blob or S3-compatible) for pickup by the tool provider.
S3-compatible usage notes:
- No AWS Tools required. The scripts can sign requests using native SigV4 via .NET and HttpClient.
- To force using AWS Tools instead, set `s3UseAwsTools = true` in `ElysiumSettings.txt` and install `AWS.Tools.S3`, as shown in the sketch below.
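For reference, a minimal sketch of enabling the AWS Tools path on an existing installation (settings key names as introduced above):

```powershell
# One-time module install (use -Scope AllUsers from an elevated session if preferred)
Install-Module AWS.Tools.S3 -Scope CurrentUser

# Then flip the switch in ElysiumSettings.txt:
#   s3UseAwsTools = true
```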
### Uninstallation
Run script Elysium.ps1 as an administrator and choose option 4 (Uninstall).
The script will then delete everything and remove the passphrase variable.

View File

@@ -16,7 +16,7 @@
Known hashes database update script for the Elysium AD password testing tool.
.DESCRIPTION
-This script downloads khdb.txt.zip from the designated Azure Storage account, validates and decompresses it, and atomically updates the current version with backup and logging.
This script downloads khdb.txt.zip from the designated storage (Azure Blob or S3-compatible), validates and decompresses it, and atomically updates the current version with backup and logging.
#>
# safer defaults
@@ -87,6 +87,104 @@ function Build-BlobUri([string]$account, [string]$container, [string]$sas) {
return $ub.Uri.AbsoluteUri
}
function Ensure-AWSS3Module {
try { $null = [Amazon.S3.AmazonS3Client]; return } catch {}
try { Import-Module -Name AWS.Tools.S3 -ErrorAction Stop; return } catch {}
try { Import-Module -Name AWSPowerShell.NetCore -ErrorAction Stop; return } catch {}
throw "AWS Tools for PowerShell not found. Install with: Install-Module AWS.Tools.S3 -Scope CurrentUser"
}
function New-S3Client {
param(
[string]$EndpointUrl,
[string]$Region,
[string]$AccessKeyId,
[string]$SecretAccessKey,
[bool]$ForcePathStyle = $true
)
Ensure-AWSS3Module
$creds = New-Object Amazon.Runtime.BasicAWSCredentials($AccessKeyId, $SecretAccessKey)
$cfg = New-Object Amazon.S3.AmazonS3Config
if ($EndpointUrl) { $cfg.ServiceURL = $EndpointUrl }
if ($Region) { try { $cfg.RegionEndpoint = [Amazon.RegionEndpoint]::GetBySystemName($Region) } catch {} }
$cfg.ForcePathStyle = [bool]$ForcePathStyle
return (New-Object Amazon.S3.AmazonS3Client($creds, $cfg))
}
# Native S3 SigV4 (no AWS Tools) helpers
function Get-Bytes([string]$s) { return [System.Text.Encoding]::UTF8.GetBytes($s) }
function Get-HashHex([byte[]]$bytes) { $sha=[System.Security.Cryptography.SHA256]::Create(); try { ([BitConverter]::ToString($sha.ComputeHash($bytes))).Replace('-', '').ToLowerInvariant() } finally { $sha.Dispose() } }
function HmacSha256([byte[]]$key, [string]$data) { $h=[System.Security.Cryptography.HMACSHA256]::new($key); try { $h.ComputeHash((Get-Bytes $data)) } finally { $h.Dispose() } }
function GetSignatureKey([string]$secret, [string]$dateStamp, [string]$regionName, [string]$serviceName) {
$kDate = HmacSha256 (Get-Bytes ('AWS4' + $secret)) $dateStamp
$kRegion = HmacSha256 $kDate $regionName
$kService = HmacSha256 $kRegion $serviceName
HmacSha256 $kService 'aws4_request'
}
function UriEncode([string]$data, [bool]$encodeSlash) { $enc=[System.Uri]::EscapeDataString($data); if (-not $encodeSlash) { $enc = $enc -replace '%2F','/' }; $enc }
function BuildCanonicalPath([System.Uri]$uri) { $segments=$uri.AbsolutePath.Split('/'); $encoded=@(); foreach($s in $segments){ $encoded += (UriEncode $s $false) }; $p=($encoded -join '/'); if (-not $p.StartsWith('/')){ $p='/' + $p }; $p }
function ToHex([byte[]]$b) { ([BitConverter]::ToString($b)).Replace('-', '').ToLowerInvariant() }
function BuildAuthHeaders($method, [System.Uri]$uri, [string]$region, [string]$accessKey, [string]$secretKey, [string]$payloadHash) {
$algorithm = 'AWS4-HMAC-SHA256'
$amzdate = (Get-Date).ToUniversalTime().ToString('yyyyMMddTHHmmssZ')
$datestamp = (Get-Date).ToUniversalTime().ToString('yyyyMMdd')
$hostHeader = $uri.Host; if (-not $uri.IsDefaultPort) { $hostHeader = "$hostHeader:$($uri.Port)" }
$canonicalUri = BuildCanonicalPath $uri
$canonicalQueryString = ''
$canonicalHeaders = "host:$hostHeader`n" + "x-amz-content-sha256:$payloadHash`n" + "x-amz-date:$amzdate`n"
$signedHeaders = 'host;x-amz-content-sha256;x-amz-date'
$canonicalRequest = "$method`n$canonicalUri`n$canonicalQueryString`n$canonicalHeaders`n$signedHeaders`n$payloadHash"
$credentialScope = "$datestamp/$region/s3/aws4_request"
$stringToSign = "$algorithm`n$amzdate`n$credentialScope`n$((Get-HashHex (Get-Bytes $canonicalRequest)))"
$signingKey = GetSignatureKey $secretKey $datestamp $region 's3'
$signature = ToHex (HmacSha256 $signingKey $stringToSign)
$authHeader = "$algorithm Credential=$accessKey/$credentialScope, SignedHeaders=$signedHeaders, Signature=$signature"
@{ 'x-amz-date' = $amzdate; 'x-amz-content-sha256' = $payloadHash; 'Authorization' = $authHeader }
}
function BuildS3Uri([string]$endpointUrl, [string]$bucket, [string]$key, [bool]$forcePathStyle) {
$base=[System.Uri]$endpointUrl; $ub=[System.UriBuilder]::new($base)
if ($forcePathStyle) { $p=$ub.Path.TrimEnd('/'); if ([string]::IsNullOrEmpty($p)){ $p='/' }; $ub.Path = ($p.TrimEnd('/') + '/' + $bucket + '/' + $key) }
else { $ub.Host = "$bucket." + $ub.Host; $p=$ub.Path.TrimEnd('/'); if ([string]::IsNullOrEmpty($p)){ $p='/' }; $ub.Path = ($p.TrimEnd('/') + '/' + $key) }
$ub.Uri
}
function Invoke-S3HttpDownloadWithRetry([string]$endpointUrl, [string]$bucket, [string]$key, [string]$targetPath, [string]$region, [string]$ak, [string]$sk, [bool]$forcePathStyle) {
Add-Type -AssemblyName System.Net.Http -ErrorAction SilentlyContinue
$client = [System.Net.Http.HttpClient]::new()
$retries=5; $delay=2
try {
for($i=0;$i -lt $retries;$i++){
try {
$uri = BuildS3Uri -endpointUrl $endpointUrl -bucket $bucket -key $key -forcePathStyle $forcePathStyle
$payloadHash = (Get-HashHex (Get-Bytes ''))
$req = [System.Net.Http.HttpRequestMessage]::new([System.Net.Http.HttpMethod]::Get, $uri)
$hdrs = BuildAuthHeaders -method 'GET' -uri $uri -region $region -accessKey $ak -secretKey $sk -payloadHash $payloadHash
$req.Headers.TryAddWithoutValidation('x-amz-date', $hdrs['x-amz-date']) | Out-Null
$req.Headers.TryAddWithoutValidation('Authorization', $hdrs['Authorization']) | Out-Null
$req.Headers.TryAddWithoutValidation('x-amz-content-sha256', $hdrs['x-amz-content-sha256']) | Out-Null
$resp = $client.SendAsync($req).Result
if (-not $resp.IsSuccessStatusCode) { throw "HTTP $([int]$resp.StatusCode) $($resp.ReasonPhrase)" }
$totalBytes = $resp.Content.Headers.ContentLength
$stream = $resp.Content.ReadAsStreamAsync().Result
$fs = [System.IO.File]::Create($targetPath)
try {
$buffer = New-Object byte[] 8192
$totalRead = 0
while (($read = $stream.Read($buffer, 0, $buffer.Length)) -gt 0) {
$fs.Write($buffer, 0, $read)
$totalRead += $read
if ($totalBytes) { $pct = ($totalRead * 100.0) / $totalBytes; Write-Progress -Activity "Downloading khdb.txt.zip" -Status ("{0:N2}% Complete" -f $pct) -PercentComplete $pct } else { Write-Progress -Activity "Downloading khdb.txt.zip" -Status ("Downloaded {0:N0} bytes" -f $totalRead) -PercentComplete 0 }
}
} finally { $fs.Close(); $stream.Close() }
if ($resp) { $resp.Dispose() }
return
} catch {
if ($i -lt ($retries - 1)) { Write-Warning "Download failed (attempt $($i+1)/$retries): $($_.Exception.Message). Retrying in ${delay}s..."; Start-Sleep -Seconds $delay; $delay=[Math]::Min($delay*2,30) }
else { throw }
} finally { if ($req){ $req.Dispose() } }
}
} finally { $client.Dispose() }
}
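# Illustrative usage (commented out, placeholder settings, not part of this commit): the retrying
# download helper above is what Update-KHDB calls for the S3 path; run interactively to fetch the
# archive to a temporary file.
#   $tmpZip = Join-Path ([System.IO.Path]::GetTempPath()) 'khdb.txt.zip'
#   Invoke-S3HttpDownloadWithRetry -endpointUrl 'https://s3.your-region.example-provider.com' -bucket 'elysium-khdb' -key 'khdb.txt.zip' -targetPath $tmpZip -region 'us-east-1' -ak '<access key>' -sk '<secret key>' -forcePathStyle $true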
function Invoke-DownloadWithRetry([System.Net.Http.HttpClient]$client, [string]$uri, [string]$targetPath) {
$retries = 5
$delay = 2
@@ -151,19 +249,55 @@ function Update-KHDB {
$installPath = Get-InstallationPath $settings
if (-not (Test-Path $installPath)) { New-Item -Path $installPath -ItemType Directory -Force | Out-Null }
-$storageAccountName = $settings['storageAccountName']
-$containerName = $settings['containerName']
-$sasToken = $settings['sasToken']
-$uri = Build-BlobUri -account $storageAccountName -container $containerName -sas $sasToken
$storageProvider = $settings['StorageProvider']
if ([string]::IsNullOrWhiteSpace($storageProvider)) { $storageProvider = 'Azure' }
$client = $null
$s3Bucket = $settings['s3BucketName']
$s3EndpointUrl = $settings['s3EndpointUrl']
$s3Region = $settings['s3Region']
$s3AK = $settings['s3AccessKeyId']
$s3SK = $settings['s3SecretAccessKey']
$s3Force = $settings['s3ForcePathStyle']
$s3UseAwsTools = $settings['s3UseAwsTools']
try { $s3Force = [System.Convert]::ToBoolean($s3Force) } catch { $s3Force = $true }
try { $s3UseAwsTools = [System.Convert]::ToBoolean($s3UseAwsTools) } catch { $s3UseAwsTools = $false }
-$client = New-HttpClient
$tmpDir = New-Item -ItemType Directory -Path ([System.IO.Path]::Combine([System.IO.Path]::GetTempPath(), "elysium-khdb-" + [System.Guid]::NewGuid())) -Force
$zipPath = Join-Path -Path $tmpDir.FullName -ChildPath 'khdb.txt.zip'
$extractDir = Join-Path -Path $tmpDir.FullName -ChildPath 'extract'
New-Item -ItemType Directory -Path $extractDir -Force | Out-Null
-Write-Host "Downloading KHDB from Azure Blob Storage..."
-Invoke-DownloadWithRetry -client $client -uri $uri -targetPath $zipPath
if ($storageProvider -ieq 'S3') {
if ([string]::IsNullOrWhiteSpace($s3Bucket)) { throw 's3BucketName is missing or empty.' }
if ([string]::IsNullOrWhiteSpace($s3AK) -or [string]::IsNullOrWhiteSpace($s3SK)) { throw 's3AccessKeyId / s3SecretAccessKey missing or empty.' }
if ([string]::IsNullOrWhiteSpace($s3EndpointUrl)) { throw 's3EndpointUrl is required for S3-compatible storage.' }
if ($s3UseAwsTools) {
try {
$s3Client = New-S3Client -EndpointUrl $s3EndpointUrl -Region $s3Region -AccessKeyId $s3AK -SecretAccessKey $s3SK -ForcePathStyle:$s3Force
Write-Host "Downloading KHDB from S3-compatible storage (AWS Tools)..."
# Use AWS SDK stream method into file for progress parity
$req = New-Object Amazon.S3.Model.GetObjectRequest -Property @{ BucketName = $s3Bucket; Key = 'khdb.txt.zip' }
$resp = $s3Client.GetObject($req)
try { $resp.WriteResponseStreamToFile($zipPath, $true) } finally { $resp.Dispose() }
} catch {
Write-Warning "AWS Tools path failed or not available. Falling back to native HTTP (SigV4). Details: $($_.Exception.Message)"
Write-Host "Downloading KHDB from S3-compatible storage..."
Invoke-S3HttpDownloadWithRetry -endpointUrl $s3EndpointUrl -bucket $s3Bucket -key 'khdb.txt.zip' -targetPath $zipPath -region $s3Region -ak $s3AK -sk $s3SK -forcePathStyle:$s3Force
}
} else {
Write-Host "Downloading KHDB from S3-compatible storage..."
Invoke-S3HttpDownloadWithRetry -endpointUrl $s3EndpointUrl -bucket $s3Bucket -key 'khdb.txt.zip' -targetPath $zipPath -region $s3Region -ak $s3AK -sk $s3SK -forcePathStyle:$s3Force
}
} else {
$storageAccountName = $settings['storageAccountName']
$containerName = $settings['containerName']
$sasToken = $settings['sasToken']
$uri = Build-BlobUri -account $storageAccountName -container $containerName -sas $sasToken
$client = New-HttpClient
Write-Host "Downloading KHDB from Azure Blob Storage..."
Invoke-DownloadWithRetry -client $client -uri $uri -targetPath $zipPath
}
Write-Host "Download completed. Extracting archive..." Write-Host "Download completed. Extracting archive..."
Expand-Archive -Path $zipPath -DestinationPath $extractDir -Force Expand-Archive -Path $zipPath -DestinationPath $extractDir -Force