AzureHunter.psm1

using namespace System.IO
using namespace AzureHunter.Logger
using namespace AzureHunter.AzureSearcher
using namespace AzureHunter.TimeStamp
using namespace AzureHunter.AzCloudInit
using namespace AzureHunter.CloudInit
#Region '.\classes\01.AzureHunter.Enums.ps1' 0
# Ref: https://docs.microsoft.com/en-us/office/office-365-management-api/office-365-management-activity-api-schema#auditlogrecordtype

enum AuditLogRecordType {
    All
    AeD
    AipDiscover
    AipFileDeleted
    AipHeartBeat
    AipProtectionAction
    AipSensitivityLabelAction
    AirAdminActionInvestigation
    AirInvestigation
    AirManualInvestigation
    ApplicationAudit
    AttackSim
    AzureActiveDirectory
    AzureActiveDirectoryAccountLogon
    AzureActiveDirectoryStsLogon
    CDPClassificationDocument
    CDPClassificationMailItem
    CDPHygieneSummary
    CDPMlInferencingResult
    CDPPostMailDeliveryAction
    CDPUnifiedFeedback
    CRM
    Campaign
    ComplianceDLPExchange
    ComplianceDLPExchangeClassification
    ComplianceDLPSharePoint
    ComplianceDLPSharePointClassification
    ComplianceSupervisionExchange
    ConsumptionResource
    CortanaBriefing
    CustomerKeyServiceEncryption
    DLPEndpoint
    DataCenterSecurityCmdlet
    DataGovernance
    DataInsightsRestApiAudit
    Discovery
    DlpSensitiveInformationType
    ExchangeAdmin
    ExchangeAggregatedOperation
    ExchangeItem
    ExchangeItemAggregated
    ExchangeItemGroup
    ExchangeSearch
    HRSignal
    HealthcareSignal
    HygieneEvent
    InformationBarrierPolicyApplication
    InformationWorkerProtection
    Kaizala
    LabelContentExplorer
    LargeContentMetadata
    MAPGAlerts
    MAPGPolicy
    MAPGRemediation
    MCASAlerts
    MDATPAudit
    MIPLabel
    MS365DCustomDetection
    MSDEGeneralSettings
    MSDEIndicatorsSettings
    MSDEResponseActions
    MSDERolesSettings
    MSTIC
    MailSubmission
    Microsoft365Group
    MicrosoftFlow
    MicrosoftForms
    MicrosoftStream
    MicrosoftTeams
    MicrosoftTeamsAdmin
    MicrosoftTeamsAnalytics
    MicrosoftTeamsDevice
    MicrosoftTeamsShifts
    MipAutoLabelExchangeItem
    MipAutoLabelProgressFeedback
    MipAutoLabelSharePointItem
    MipAutoLabelSharePointPolicyLocation
    MipAutoLabelSimulationCompletion
    MipAutoLabelSimulationProgress
    MipAutoLabelSimulationStatistics
    MipExactDataMatch
    MyAnalyticsSettings
    OfficeNative
    OfficeScripts
    OnPremisesFileShareScannerDlp
    OnPremisesSharePointScannerDlp
    OneDrive
    PhysicalBadgingSignal
    PowerAppsApp
    PowerAppsPlan
    PowerBIAudit
    PrivacyDataMinimization
    PrivacyDigestEmail
    PrivacyRemediationAction
    Project
    Quarantine
    Search
    SecurityComplianceAlerts
    SecurityComplianceCenterEOPCmdlet
    SecurityComplianceInsights
    SecurityComplianceRBAC
    SecurityComplianceUserChange
    SensitivityLabelAction
    SensitivityLabelPolicyMatch
    SensitivityLabeledFileAction
    SharePoint
    SharePointCommentOperation
    SharePointContentTypeOperation
    SharePointFieldOperation
    SharePointFileOperation
    SharePointListItemOperation
    SharePointListOperation
    SharePointSearch
    SharePointSharingOperation
    SkypeForBusinessCmdlets
    SkypeForBusinessPSTNUsage
    SkypeForBusinessUsersBlocked
    Sway
    SyntheticProbe
    TABLEntryRemoved
    TeamsEasyApprovals
    TeamsHealthcare
    ThreatFinder
    ThreatIntelligence
    ThreatIntelligenceAtpContent
    ThreatIntelligenceUrl
    UserTraining
    WDATPAlerts
    WorkplaceAnalytics
    Yammer
}

# Referencing the type once here forces it to initialize; otherwise loading it via "ScriptsToRun" in the module manifest fails
[AuditLogRecordType]::AeD
#EndRegion '.\classes\01.AzureHunter.Enums.ps1' 139
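# Usage sketch (illustrative): record types can be referenced as enum members or cast
# from strings, which is how Search-AzureCloudUnifiedLog resolves its -AuditLogRecordType
# parameter further below.
<#
    [AuditLogRecordType]::ExchangeItem          # direct member access
    [AuditLogRecordType]"AzureActiveDirectory"  # cast from a string value
#>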
#Region '.\classes\02.AzureHunter.Logger.ps1' 0
class Logger {

    <#
 
    .SYNOPSIS
        Class that writes message logs from this module, in JSON format, to a log file. When a hashtable is passed to LogMessage via the Dictionary parameter, its items are added to the log line as key/value pairs.
 
    .PARAMETER Message
        The text to be written
 
    .PARAMETER OutputDir
        The directory where the scan results are stored
 
    .PARAMETER Dictionary
        Allows you to pass a dictionary (hashtable) whose keys and values will be converted to a JSON line. Nested keys are not supported.
 
    #>


    [Hashtable] $Dictionary
    [ValidateSet('DEBUG','ERROR','LOW','INFO','SPECIAL','REMOTELOG')]
    [string] $MessageType
    [string] $CallingModule = $( if(Get-PSCallStack){ $(Get-PSCallStack)[1].FunctionName } else {"NA"} )
    [string] $ScriptPath
    [string] $LogFileJSON
    [string] $LogFileTXT
    [string] $MessageColor
    [string] $BackgroundColor
    $Message
    [string] $LogRecordStdOut
    [string] $strTimeNow
    [bool] $LogToFile

    Logger () {
        $this.LogToFile = $False
        # Write-Host "Logging to Log File is $($this.LogToFile)"
    }

    [Logger] InitLogFile() {

        $this.LogToFile = $True

        # *** Getting a handle to the running script path so that we can refer to it *** #
        $this.ScriptPath = [System.IO.DirectoryInfo]::new($pwd)


        # This function can be chained with new() to instantiate the class and initialize a file
        $this.strTimeNow = (Get-Date).ToUniversalTime().ToString("yyMMdd-HHmmss")
        $this.LogFileJSON = "$($this.ScriptPath)\$($env:COMPUTERNAME)-azurehunter-$($this.strTimeNow).json"
        $this.LogFileTXT = "$($this.ScriptPath)\$($env:COMPUTERNAME)-azurehunter-$($this.strTimeNow).txt"
        return $this
    }

    LogMessage([string]$Message, [string]$MessageType, [Hashtable]$Dictionary, [System.Management.Automation.ErrorRecord]$LogErrorMessage) {
        
        # Capture LogType
        $this.MessageType = $MessageType.ToUpper()
        
        # Generate Data Dict
        $TimeNow = (Get-Date).ToUniversalTime().ToString("yy-MM-ddTHH:mm:ssZ")
        $LogRecord = [Ordered]@{
            "severity"      = $MessageType
            "timestamp"     = $TimeNow
            "hostname"      = $($env:COMPUTERNAME)
            "message"       = "NA"
        }

        # Let's log the dict as key-value pairs if it was passed
        if($null -ne $Dictionary) {
            ForEach ($key in $Dictionary.Keys){
                $LogRecord.Add($key, $Dictionary.Item($key))
            }
        }
        else {
            $LogRecord.message = $Message
        }

        # Should we log an Error?
        if ($null -ne $LogErrorMessage) {
            # Grab latest error namespace
            try {
                $ErrorNameSpace = $Error[0].Exception.GetType().FullName
            }
            catch {
                try {
                    $ErrorNameSpace = $Error.Exception.GetType().FullName
                }
                catch {
                    $ErrorNameSpace = "Undetermined"
                }
            }
            
            # Add Error specific fields
            $LogRecord.Add("error_name_space", $ErrorNameSpace)
            $LogRecord.Add("error_script_line", $LogErrorMessage.InvocationInfo.ScriptLineNumber)
            $LogRecord.Add("error_script_line_offset", $LogErrorMessage.InvocationInfo.OffsetInLine)
            $LogRecord.Add("error_full_line", $($LogErrorMessage.InvocationInfo.Line -replace '[^\p{L}\p{Nd}/(/)/{/}/_/[/]/./\s]', ''))
            $LogRecord.Add("error_message", $($LogErrorMessage.Exception.Message -replace '[^\p{L}\p{Nd}/(/)/{/}/_/[/]/./\s]', ''))
            $LogRecord.Add("error_id", $LogErrorMessage.FullyQualifiedErrorId)
        }

        $this.Message = $LogRecord

        # Convert log line to a readable line
        $this.LogRecordStdOut = ""
        foreach($key in $LogRecord.Keys) {
            $this.LogRecordStdOut += "$($LogRecord.$key) | "
        }
        $this.LogRecordStdOut = $this.LogRecordStdOut.TrimEnd("| ")

        # Converting log line to JSON
        $LogRecord = $LogRecord | ConvertTo-Json -Compress

        # Choosing the right StdOut Colors in case we need them
        Switch ($this.MessageType) {

            "Error" {
                $this.MessageColor = "Red"
                $this.BackgroundColor = "Black"
            }
            "Info" {
                $this.MessageColor = "Yellow"
                $this.BackgroundColor = "Black"
            }
            "Low" {
                $this.MessageColor = "Green"
                $this.BackgroundColor = "Black"
            }
            "Special" {
                $this.MessageColor = "White"
                $this.BackgroundColor = "DarkRed"
            }
            "RemoteLog" {
                $this.MessageColor = "DarkGreen"
                $this.BackgroundColor = "Green"
            }
            "Debug" {
                $this.MessageColor = "Black"
                $this.BackgroundColor = "DarkCyan"
            }

        }

        # Finally emit the logs
        if($this.LogToFile -eq $True) {
            $LogRecord | Out-File $this.LogFileJSON -Append ascii
            $this.LogRecordStdOut | Out-File $this.LogFileTXT -Append ascii
        }
        
        Write-Host $this.LogRecordStdOut -ForegroundColor $this.MessageColor -BackgroundColor $this.BackgroundColor
    }
}
#EndRegion '.\classes\02.AzureHunter.Logger.ps1' 152
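# Usage sketch (illustrative; $MyLogger is a hypothetical variable name): chaining
# InitLogFile() after New() enables file output in addition to the console stream.
<#
    $MyLogger = [Logger]::New().InitLogFile()
    $MyLogger.LogMessage("Scan started", "INFO", $null, $null)
    $MyLogger.LogMessage("Scan metadata", "DEBUG", @{ scan_id = 42 }, $null)
    try { throw "boom" } catch { $MyLogger.LogMessage("Something failed", "ERROR", $null, $_) }
#>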
#Region '.\classes\03.AzureHunter.TimeStamp.ps1' 0
#using namespace System.IO

class TimeStamp {

    # Public Properties
    [float] $Interval
    [float] $IntervalInMinutes
    [float] $UserDefinedInitialTimeInterval # this is the value passed by the user when invoking Search-AzureCloudUnifiedLog
    [bool] $InitialIntervalAdjusted
    [System.Globalization.CultureInfo] $Culture
    [DateTime] $StartTime
    [DateTime] $EndTime
    [DateTime] $StartTimeSlice
    [DateTime] $EndTimeSlice
    [DateTime] $StartTimeUTC
    [DateTime] $EndTimeUTC
    [DateTime] $StartTimeSliceUTC
    [DateTime] $EndTimeSliceUTC

    # Default, Overloaded Constructor
    TimeStamp([String] $StartTime, [String] $EndTime, [float] $UserDefinedInitialTimeInterval) {
        $this.Culture = New-Object System.Globalization.CultureInfo("en-AU")
        $this.StartTime = $this.ParseDateString($StartTime)
        $this.EndTime = $this.ParseDateString($EndTime)
        $this.UserDefinedInitialTimeInterval = $UserDefinedInitialTimeInterval
        $this.UpdateUTCTimestamp()
    }

    # Default, Parameterless Constructor
    TimeStamp() {
        $this.Culture = New-Object System.Globalization.CultureInfo("en-AU")
    }

    # Parse a sortable date string (yyyy-MM-ddTHH:mm:ss) into a DateTime using the configured culture
    [DateTime]ParseDateString ([String] $TimeStamp) {
        return [DateTime]::ParseExact($TimeStamp, $this.Culture.DateTimeFormat.SortableDateTimePattern, $null)
    }

    Reset() {
        $this.StartTimeSlice = [DateTime]::new(0)
        $this.EndTimeSlice = [DateTime]::new(0)
    }

    IncrementTimeSlice ([float] $HourlySlice) {

        $this.Interval = $HourlySlice

        # if running method for the first time, set $StartTimeSlice to $StartTime
        if(($this.StartTimeSlice -le $this.StartTime) -and ($this.EndTimeSlice -lt $this.StartTime)) {
            $this.StartTimeSlice = $this.StartTime
            $this.EndTimeSlice = $this.StartTime.AddHours($HourlySlice)
        }
        else {
            $this.StartTimeSlice = $this.EndTimeSlice
            $this.EndTimeSlice = $this.StartTimeSlice.AddHours($HourlySlice)
        }

        $this.UpdateUTCTimestamp()
    }

    [void]UpdateUTCTimestamp () {
        $this.StartTimeUTC = $this.StartTime.ToUniversalTime()
        $this.EndTimeUTC = $this.EndTime.ToUniversalTime()
        $this.StartTimeSliceUTC = $this.StartTimeSlice.ToUniversalTime()
        $this.EndTimeSliceUTC = $this.EndTimeSlice.ToUniversalTime()
    }
}
#EndRegion '.\classes\03.AzureHunter.TimeStamp.ps1' 68
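# Usage sketch (illustrative): a TimeStamp built from sortable date strings slides a
# window of N hours across the overall range with each call to IncrementTimeSlice().
<#
    $Slicer = [TimeStamp]::New("2021-03-06T10:00:00", "2021-03-07T10:00:00", 12)
    $Slicer.IncrementTimeSlice(12)   # first call: slice = StartTime..StartTime+12h
    $Slicer.IncrementTimeSlice(12)   # next call: the slice moves forward another 12h
#>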
#Region '.\classes\04.AzureHunter.AzureSearcher.ps1' 0
#using namespace System.IO

class AzureSearcher {

    # Public Properties
    [String[]] $Operations
    [String] $RecordType
    [String[]] $UserIds
    [String] $FreeText
    [DateTime] $StartTimeUTC
    [DateTime] $EndTimeUTC
    [String] $SessionId
    [TimeStamp] $TimeSlicer
    [int] $ResultSizeUpperThreshold
    [int] $ResultCountEstimate = 0

    [AzureSearcher] SetOperations([String[]] $Operations) {
        $this.Operations = $Operations
        return $this
    }

    [AzureSearcher] SetRecordType([AuditLogRecordType] $RecordType) {
        $this.RecordType = $RecordType.ToString()
        return $this
    }

    [AzureSearcher] SetUserIds([String[]] $UserIds) {
        $this.UserIds = $UserIds
        return $this
    }

    [AzureSearcher] SetFreeText([String] $FreeText) {
        $this.FreeText = $FreeText
        return $this
    }

    # Default, Overloaded Constructor
    AzureSearcher([TimeStamp] $TimeSlicer, [int] $ResultSizeUpperThreshold) {
        $this.TimeSlicer = $TimeSlicer
        $this.StartTimeUTC = $TimeSlicer.StartTimeSliceUTC
        $this.EndTimeUTC = $TimeSlicer.EndTimeSliceUTC
        $this.ResultSizeUpperThreshold = $ResultSizeUpperThreshold
    }

    [Array] SearchAzureAuditLog([String] $SessionId) {

        # Update Variables
        $this.StartTimeUTC = $this.TimeSlicer.StartTimeSliceUTC
        $this.EndTimeUTC = $this.TimeSlicer.EndTimeSliceUTC
        $this.SessionId = $SessionId

        try {
            if($this.Operations -and -not $this.RecordType) {
                throw "You must specify a RecordType if selecting and Operation"
            }
            elseif($this.RecordType -and ($this.RecordType -ne "All")) {
                
                if($this.Operations) {

                    if($this.FreeText){
                        # RecordType, Operations & FreeText parameters provided
                        $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -RecordType $this.RecordType -Operations $this.Operations -FreeText $this.FreeText -ErrorAction Stop
                        return $Results
                    }
                    else {
                        # Only RecordType & Operations parameters provided
                        $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -RecordType $this.RecordType -Operations $this.Operations -ErrorAction Stop
                        return $Results
                    }

                }
                
                else {
                    if($this.FreeText){
                        # Only RecordType & FreeText parameters provided
                        $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -RecordType $this.RecordType -FreeText $this.FreeText -ErrorAction Stop
                        return $Results
                    }
                    else {
                        # Only RecordType parameter provided, no Operations or FreeText
                        $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -RecordType $this.RecordType -ErrorAction Stop
                        return $Results
                    }
                }
                
            }
            elseif($this.UserIds -or $this.FreeText) {

                if($this.FreeText){
                    # Fetch all data matching a particular string and a given User
                    $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -UserIds $this.UserIds -FreeText $this.FreeText -ErrorAction Stop
                    return $Results
                }
                else {
                    # Fetch all data for a given User only
                    $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -UserIds $this.UserIds -ErrorAction Stop
                    return $Results
                }
            }
            else {
                # Fetch all data for everything
                $Results = Search-UnifiedAuditLog -StartDate $this.StartTimeUTC -EndDate $this.EndTimeUTC -ResultSize 5000 -SessionCommand ReturnLargeSet -SessionId $this.SessionId -ErrorAction Stop
                return $Results
            }
        }
        catch {
            throw $_
        }
    }

    AdjustTimeInterval([String] $AdjustmentMode, [String] $AzureLogSearchSessionName, [Int] $ResultCount) {

        # AdjustmentMode: whether the time interval should be adjusted via proportional
        # estimation ("ProportionalAdjustment") or by a fixed percentage ("PercentageAdjustment")

        # Run initial check of actions to perform
        $NeedToFetchLogs = $false
        if($ResultCount) {
            $NeedToFetchLogs = $false
            $this.ResultCountEstimate = $ResultCount
        }
        else {
            $NeedToFetchLogs = $true
        }

        # **** START: TIME WINDOW FLOW CONTROL ROUTINE **** #
        # ************************************************* #
        # This routine performs a series of checks to determine whether the time window
        # used for log extraction needs to be adjusted or not, in order to extract the
        # highest density of logs within a specified time interval

        # Only run this block if SkipAutomaticTimeWindowReduction is not set:
        # determine the initial optimal time interval (likely to be less than 30 min anyway)
        # or re-adjust it whenever required by downstream log extractors
        $TimeWindowAdjustmentNumberOfAttempts = 1
        $ToleranceBeforeIncrementingTimeSlice = 3 # This controls how many cycles we will run before increasing the TimeSlice after getting ZERO results (I said zero, not null or empty)
        $ToleranceCounter = 1

        while(($TimeWindowAdjustmentNumberOfAttempts -le 3) -and ($NeedToFetchLogs -eq $true)) {



            # Run initial query to estimate results and adjust time intervals
            try {
                $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Initial TimeSlice in local time: [StartDate] $($this.TimeSlicer.StartTimeSlice.ToString($this.TimeSlicer.Culture)) - [EndDate] $($this.TimeSlicer.EndTimeSlice.ToString($this.TimeSlicer.Culture))", "INFO", $null, $null)
                $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Extracting data from Azure to estimate initial result size", "INFO", $null, $null)

                $Results = $this.SearchAzureAuditLog($AzureLogSearchSessionName)

                
            }
            catch [System.Management.Automation.RemoteException] {
                $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Failed to query Azure API during initial ResultCountEstimate. Please check passed parameters and Azure API error", "ERROR", $null, $_)
                break
            }
            catch {
                Write-Host "ERROR ON: $_"
                if($TimeWindowAdjustmentNumberOfAttempts -lt 3) {
                    $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Failed to query Azure API during initial ResultCountEstimate: Attempt $TimeWindowAdjustmentNumberOfAttempts of 3. Trying again", "ERROR", $null, $_)
                    $TimeWindowAdjustmentNumberOfAttempts++
                    continue
                }
                else {
                    $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Failed to query Azure API during initial ResultCountEstimate: Attempt $TimeWindowAdjustmentNumberOfAttempts of 3. Exiting...", "ERROR", $null, $null)
                    break
                }
            }

            # Now check whether we got ANY RESULTS BACK AT ALL, if not, then there are no results for this particular timewindow. We need to increase timewindow and start again.
            try {
                $this.ResultCountEstimate = $Results[0].ResultCount
                $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Initial Result Size estimate: $($this.ResultCountEstimate)", "INFO", $null, $null)
            }
            catch {
                $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] No results were returned with the current parameters within the designated time window. Increasing timeslice.", "LOW", $null, $null)
                $this.TimeSlicer.IncrementTimeSlice($this.TimeSlicer.UserDefinedInitialTimeInterval)
                continue
            }

            # If we get to this point then it means we have at least received SOME results back.
            # Check if the ResultEstimate is within expected limits.
            # If it is, then break from Time Window Flow Control routine and proceed to log extraction process with new timeslice
            if($this.ResultCountEstimate -le $this.ResultSizeUpperThreshold) {

                if($this.ResultCountEstimate -eq 0) {

                    if($ToleranceCounter -le $ToleranceBeforeIncrementingTimeSlice) {
                        # Probably an error, we need to do it again
                        $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Query to Azure API during initial ResultCountEstimate returned ZERO results. This could be an API error. Attempting to retrieve results again BEFORE INCREMENTING TIMESLICE: Attempt $ToleranceCounter of $ToleranceBeforeIncrementingTimeSlice.", "LOW", $null, $null)
                        $ToleranceCounter++
                        continue
                    }
                    else {
                        $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Query to Azure API during initial ResultCountEstimate returned ZERO results after too many attempts. There are no logs within current time interval. Increasing it by user defined $($this.TimeSlicer.UserDefinedInitialTimeInterval).", "ERROR", $null, $null)
                        $this.TimeSlicer.IncrementTimeSlice($this.TimeSlicer.UserDefinedInitialTimeInterval)
                        # Reset $ToleranceCounter
                        $ToleranceCounter = 1
                    }
                    
                }
                else {
                    # Results are not ZERO and are within the expected Threshold. Great news!

                    $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Result Size estimate of $($this.ResultCountEstimate) in current time interval within expected threshold of $($this.ResultSizeUpperThreshold). No need to perform further time adjustments. Proceeding...", "INFO", $null, $null)

                    # Set control flags
                    $this.TimeSlicer.InitialIntervalAdjusted = $true
                    # Results within appetite, no need to adjust interval again
                    return
                }

                
            }
            else {
                break # break and go into TimeAdjustment routine below
            }
        }

        # This OptimalTimeIntervalCheck helps shorten the time it takes to arrive at a time window that fits within the expected ResultSize threshold
        # Perform optimal time interval calculation via proportional estimation
        if($AdjustmentMode -eq "ProportionalAdjustment") {
            
            $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Size of results is too big. Estimating Optimal Hourly Time Interval...", "DEBUG", $null, $null)
            $OptimalTimeSlice = ($this.ResultSizeUpperThreshold * $this.TimeSlicer.UserDefinedInitialTimeInterval) / $this.ResultCountEstimate
            $OptimalTimeSlice = [math]::Round($OptimalTimeSlice, 3)
            $IntervalInMinutes = $OptimalTimeSlice * 60
            $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Estimated Optimal Hourly Time Interval: $OptimalTimeSlice ($IntervalInMinutes minutes). Reducing interval to this value...", "DEBUG", $null, $null)

            $this.TimeSlicer.UserDefinedInitialTimeInterval = $OptimalTimeSlice
            $this.TimeSlicer.Reset()
            $this.TimeSlicer.IncrementTimeSlice($OptimalTimeSlice)
            $this.TimeSlicer.InitialIntervalAdjusted = $true

            return
        }
        # Perform time interval adjustment based on a fixed IntervalReductionRate
        # if requested by downstream data processors
        elseif($AdjustmentMode -eq "PercentageAdjustment") {
            $TimeIntervalReductionRate = 0.2

            $AdjustedHourlyTimeInterval = $this.TimeSlicer.UserDefinedInitialTimeInterval - ($this.TimeSlicer.UserDefinedInitialTimeInterval * $TimeIntervalReductionRate)
            $AdjustedHourlyTimeInterval = [math]::Round($AdjustedHourlyTimeInterval, 3)
            $IntervalInMinutes = $AdjustedHourlyTimeInterval * 60
            $Global:Logger.LogMessage("[INTERVAL FLOW CONTROL] Size of results is too big. Reducing Hourly Time Interval by $TimeIntervalReductionRate to $AdjustedHourlyTimeInterval hours ($IntervalInMinutes minutes)", "INFO", $null, $null)
            
            $this.TimeSlicer.UserDefinedInitialTimeInterval = $AdjustedHourlyTimeInterval
            $this.TimeSlicer.Reset()
            $this.TimeSlicer.IncrementTimeSlice($AdjustedHourlyTimeInterval)

            return
        }
    }
}
#EndRegion '.\classes\04.AzureHunter.AzureSearcher.ps1' 253
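# Usage sketch (illustrative; assumes an established Exchange Online session): the fluent
# setters return the searcher itself, so a query can be configured in one chain before
# calling SearchAzureAuditLog with a session name.
<#
    $Slicer = [TimeStamp]::New("2021-03-06T10:00:00", "2021-03-07T10:00:00", 12)
    $Slicer.IncrementTimeSlice(12)
    $Searcher = [AzureSearcher]::new($Slicer, 20000)
    $Searcher.SetRecordType([AuditLogRecordType]::ExchangeItem).SetUserIds(@("user@domain.com")) | Out-Null
    $Results = $Searcher.SearchAzureAuditLog("azurehunter-example-session")
#>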
#Region '.\classes\05.AzureHunter.CloudInit.ps1' 0
#using namespace AzureHunter.Logger

# A class that verifies whether the required cloud modules are installed
class AzCloudInit {

    # Public Properties

    [array] $ModuleNames
    $Logger

    # Default, Overloaded Constructor
    AzCloudInit() {
        # Initialize Logger
        if(!$Global:Logger) {
            $this.Logger = [Logger]::New()
        }
        else {
            $this.Logger = $Global:Logger
        }
        $this.Logger.LogMessage("Initializing AzCloudInit Checks for AzureHunter", "DEBUG", $null, $null)
        
    }

    [void] InitializePreChecks([string[]] $ModuleNames) {
        $this.CheckModules($ModuleNames)
        $this.CheckBasicAuthentication()
    }

    [void] CheckModules ([string[]] $ModuleNames) {

        [System.Collections.ArrayList]$RequiredModules = @("ExchangeOnlineManagement")
        #$RequiredModules = @("ExchangeOnlineManagement","AzureAD","MSOnline")
        if($ModuleNames) {
            $ModuleNames | ForEach-Object { $RequiredModules.Add($_) | Out-Null }
        }
    
        # Attempt to import the modules first; if they are already installed this speeds things up
        # Modules that cannot be imported are flagged for installation
        $this.Logger.LogMessage("Importing Required Modules", "INFO", $null, $null)
        [System.Collections.ArrayList]$AbsentModules = @()

        ForEach($Module in $RequiredModules){
            $ModuleImported = Get-Module $Module -ErrorAction SilentlyContinue
            if(!$ModuleImported){
                
                try {
                    $this.Logger.LogMessage("Attempting to Import Module $Module", "INFO", $null, $null)
                    Import-Module $Module -ErrorAction Stop
                }
                catch {
                    $this.Logger.LogMessage("Module $Module not installed. Marked for installation", "INFO", $null, $null)
                    $AbsentModules.add($Module)
                }
            }
        }

        $this.Logger.LogMessage("Installing Dependent Modules if not already deployed in the current environment...", "INFO", $null, $null)
        ForEach($Module in $AbsentModules){
            $this.Logger.LogMessage("Checking availability of $Module", "INFO", $null, $null)
            $ModulePresent = Get-InstalledModule $Module -ErrorAction SilentlyContinue

            if(!$ModulePresent){
                $ShouldInstall = Read-Host -Prompt "Module $Module is required for AzureHunter to work, would you like to install it? (y/n)"
                if($ShouldInstall -eq "y") {
                    $this.Logger.LogMessage("Installing Module $Module", "INFO", $null, $null)
                    Install-Module $Module -Force -Scope CurrentUser
                    $this.Logger.LogMessage("Importing Module $Module", "INFO", $null, $null)
                    Import-Module $Module
                }
                else {
                    $this.Logger.LogMessage("Cannot proceed without $Module. Exiting...", "INFO", $null, $null)
                    exit
                }
                
            }
            else {
                $this.Logger.LogMessage("Module $Module already available", "INFO", $null, $null)
            }
        }
    }

    CheckBasicAuthentication() {
        # This routine will check whether Basic Auth is enabled on the system to be able to import all required modules from ExchangeOnline
        $this.Logger.LogMessage("Checking Basic Authentication", "INFO", $null, $null)
        if((Get-ItemProperty "HKLM:\SOFTWARE\Policies\Microsoft\Windows\WinRM\Client" -Name AllowBasic -ErrorAction SilentlyContinue).AllowBasic -eq 0) {

            $ShouldAllowBasic = Read-Host -Prompt "Basic Authentication is not enabled on this machine and it's required by ExchangeOnline to be able to import remote commands that AzureHunter utilizes. Would you like to enable it? (y/n)"
            if($ShouldAllowBasic -eq 'y') {
                Set-ItemProperty "HKLM:\SOFTWARE\Policies\Microsoft\Windows\WinRM\Client" -Name AllowBasic -Value 1 -ErrorAction SilentlyContinue
                Start-Sleep 1
                $this.Logger.LogMessage("Allowed Basic Authentication", "INFO", $null, $null)
            }
        }
    }

    ConnectExchangeOnline() {
        # Initialize ExchangeOnline Connection for queries
        $GetPSSessions = Get-PSSession | Select-Object -Property State, Name
        $ExOConnected = (@($GetPSSessions) -like '@{State=Opened; Name=ExchangeOnlineInternalSession*').Count -gt 0
        if(-not $ExOConnected) {
            Connect-ExchangeOnline
        }
    }
}
#EndRegion '.\classes\05.AzureHunter.CloudInit.ps1' 105
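# Usage sketch (illustrative): the pre-checks import or install the required modules
# (ExchangeOnlineManagement by default) and verify Basic Authentication, after which a
# connection to Exchange Online can be established.
<#
    $CloudInit = [AzCloudInit]::new()
    $CloudInit.InitializePreChecks($null)
    $CloudInit.ConnectExchangeOnline()
#>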
#Region '.\classes\101.AzureHunter.AuditLogSchemas.Generic.ps1' 0


class AuditLogSchemaGeneric {

    <#
 
    .SYNOPSIS
        Class to capture the generic format of UnifiedAuditLog Record
 
    #>


    #hidden [string] $PSComputerName
    #hidden [string] $RunspaceId
    #hidden [string] $PSShowComputerName
    [string] $RecordType
    [DateTime] $CreationDate
    [string] $UserIds
    [string] $Operations
    [string] $AuditData
    [Int32] $ResultIndex
    [Int32] $ResultCount
    [string] $Identity
    #hidden [string] $IsValid
    #hidden [string] $ObjectState

}
#EndRegion '.\classes\101.AzureHunter.AuditLogSchemas.Generic.ps1' 27
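# Usage sketch (illustrative; $RawRecord is a hypothetical UAL record): raw records can be
# cast to this schema to drop the noisy transport properties, mirroring what
# Invoke-AzHunterPlaybook does during pre-processing.
<#
    $Slim = $RawRecord |
        Select-Object RecordType, CreationDate, UserIds, Operations, AuditData, ResultIndex, ResultCount, Identity
    $Typed = $Slim -as [AuditLogSchemaGeneric]
#>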
#Region '.\private\General-Utils.ps1' 0
<#
    This file will be loaded upon calling the module and contains generic util functions.
#>


Function New-GlobalVars {

    # *** BEGIN: GENERAL *** #

    # *** Getting a handle to the root path of the module so that we can refer to it *** #
    if ($PSScriptRoot) {
        $Global:AzHunterRoot = [System.IO.DirectoryInfo]::new($PSScriptRoot)
    } 
    else {
        $Global:AzHunterRoot = [System.IO.DirectoryInfo]::new($pwd)
    }
    if($Global:AzHunterRoot.FullName -match "source") {
        $Global:AzHunterRoot = $Global:AzHunterRoot.Parent
    }

}

Function New-OutputFolder {

    <#
    .SYNOPSIS
        Create new folders to store playbook outputs
  
    .DESCRIPTION
        Creates (or reuses) a folder to hold playbook output. When -NewParentOutputFolder is passed, a timestamped parent folder is created in the current directory and stored in $Global:AzHunterParentOutputFolder; otherwise a subfolder with the given name is created (or reused) beneath that parent and returned.
  
    .PARAMETER FolderName
        The name of the output folder to be created.
 
    .PARAMETER NewParentOutputFolder
        Create a new parent output folder to hold the results of the playbooks.
 
    #>


    [CmdletBinding(
        SupportsShouldProcess=$False
    )]
    Param (
        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=0,
            HelpMessage='Plugin output folder'
        )]
        [ValidateNotNullOrEmpty()]
        $FolderName,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=1,
            HelpMessage='Whether we want to create a new parent output folder to hold the results of our plugins'
        )]
        [ValidateNotNullOrEmpty()]
        [switch]$NewParentOutputFolder
    )

    # Initialize Logger
    if(!$Global:Logger){ $Logger = [Logger]::New() }
        
    if($NewParentOutputFolder) {
        try {
            # Configure Output Folder
            $CurrentFolder = [System.IO.DirectoryInfo]::new($pwd)
            $strTimeNow = (Get-Date).ToUniversalTime().ToString("yyMMdd-HHmmss")
            $ParentFolderName = "AzHunter-$strTimeNow-output"
            $Global:AzHunterParentOutputFolder = New-Item -Path $CurrentFolder.FullName -Name $ParentFolderName -ItemType Directory
            $Logger.LogMessage("Created Parent Output Folder: $Global:AzHunterParentOutputFolder", "INFO", $null, $null)
        }
        catch {
            Write-Host "Parent Output Folder Could not be Created"
        }
    }
    else {
        
        $ProposedFolder = "$Global:AzHunterParentOutputFolder\$FolderName"
        $FolderAlreadyExists = Test-Path $ProposedFolder
        if($FolderAlreadyExists) {
            $Logger.LogMessage("Folder $ProposedFolder already exists. No need to create a new one.", "INFO", $null, $null)
            $ExistingPath = Join-Path $Global:AzHunterParentOutputFolder $FolderName
            $AzHunterPlaybookOutputFolder = [System.IO.DirectoryInfo]::new($ExistingPath)
        }
        else {
            $AzHunterPlaybookOutputFolder = New-Item -Path $Global:AzHunterParentOutputFolder -Name $FolderName -ItemType Directory
        }
        
        return $AzHunterPlaybookOutputFolder
    }

}
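# Usage sketch (illustrative; the folder name is hypothetical): playbooks request a
# subfolder under the parent output folder created at module load time; the call returns
# a DirectoryInfo handle to the new or pre-existing folder.
<#
    $PlaybookOutputFolder = New-OutputFolder -FolderName "AzHunter.Playbook.Example"
#>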

New-GlobalVars
New-OutputFolder -NewParentOutputFolder
#EndRegion '.\private\General-Utils.ps1' 97
#Region '.\public\Invoke-AzHunterPlaybook.ps1' 0
<#
    CYBERNETHUNTER SECURITY OPERATIONS :)
    Author: Diego Perez (@darkquassar)
    Version: 1.1.0
    Module: Invoke-AzHunterPlaybook.ps1
    Description: This module contains some utilities to run playbooks through Azure, eDiscovery and O365 logs.
#>


Function Invoke-AzHunterPlaybook {
    <#
    .SYNOPSIS
        A PowerShell function to run playbooks over data obtained via AzureHunter
  
    .DESCRIPTION
        This function loads the playbooks shipped with AzureHunter and runs the selected ones over the provided records, optionally returning the processed records to the console via -PassThru
  
    .PARAMETER PlayBookName
        The name of the playbook that will be executed against the dataset passed to this function
 
    .EXAMPLE
        Invoke-AzHunterPlaybook -Records $AzHunterRecords -PlayBooks 'AzHunter.Playbook.UAL.Exporter'
 
    .NOTES
        Please use this with care and for legitimate purposes. The author does not take responsibility for any damage caused as a result of employing this script.
    #>


    [CmdletBinding(
        SupportsShouldProcess=$False
    )]
    Param (
        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=0,
            HelpMessage='The records to process from a powershell object'
        )]
        [ValidateNotNullOrEmpty()]
        [Object]$Records,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=1,
            HelpMessage='A CSV or JSON file to process instead of providing records'
        )]
        [ValidateNotNullOrEmpty()]
        [String]$FileName,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=2,
            HelpMessage='The type of Azure Log to process. It helps orient the selection of Playbooks. Not a required parameter.'
        )]
        [ValidateNotNullOrEmpty()]
        [ValidateSet('UnifiedAuditLog','eDiscoverySummaryReport','AzureAD')]
        [String]$AzureLogType,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=3,
            HelpMessage='The playbook you would like to run for the current batch of logs'
        )]
        [ValidateNotNullOrEmpty()]
        [String[]]$PlayBooks='AzHunter.Playbook.UAL.Exporter',

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=4,
            HelpMessage='The playbook parameters, if required, that will be passed onto the playbook via Splatting. It needs to be a HashTable like: $Params = @{ Path = "TestFile.txt"; ExtractDetails = $True }'
        )]
        [ValidateNotNullOrEmpty()]
        [hashtable]$PlayBookParameters,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=5,
            HelpMessage='Whether we want records returned back to the console'
        )]
        [ValidateNotNullOrEmpty()]
        [switch]$PassThru

    )

    BEGIN {

        # Initialize Logger
        if(!$Global:Logger){ $Logger = [Logger]::New() }
        $Logger.LogMessage("Initializing AzHunterPlaybook Module", "INFO", $null, $null)

        # Determine path to Playbooks folder
        # This is required to pre-load the Base Playbook "AzHunterBase" to do initial sanitization of logs
        if ($PSScriptRoot) {
            $ScriptPath = [System.IO.DirectoryInfo]::new($PSScriptRoot)
            if($ScriptPath.FullName -match "AzureHunter\\source"){
                $ScriptPath = $ScriptPath.Parent
                $Script:PlaybooksPath = Join-Path $ScriptPath.FullName "playbooks"
            }
            else {
                $Script:PlaybooksPath = Join-Path $ScriptPath.FullName "playbooks"
            }
        }
        else {
            $ScriptPath = [System.IO.DirectoryInfo]::new($pwd)
            $PlaybooksFolderPresent = Get-ChildItem -Path $ScriptPath.FullName -Directory -Filter "Playbooks"
            if($PlaybooksFolderPresent){
                $Script:PlaybooksPath = Join-Path $ScriptPath "playbooks"
            }
            else {
                $Logger.LogMessage("Could not find Playbooks folder", "ERROR", $null, $_)
                throw "Could not find Playbooks folder"
            }
        }

        # Load Base Playbook
        try {
            . "$Script:PlaybooksPath\AzHunter.Playbook.Base.ps1"
        }
        catch {
            $Logger.LogMessage("Could not load AzHunter.Playbook.Base", "ERROR", $null, $_)
        }

        # Grab List of All Playbook File Paths
        [System.Collections.ArrayList]$PlaybookFileList = @()
        $PlaybookFiles = Get-ChildItem $Script:PlaybooksPath\* -File -Filter "AzHunter.Playbook*.ps1" -Exclude "AzHunter.Playbook.Base*"
        $PlaybookFiles | ForEach-Object { 
            $PlaybookFileList.Add([System.IO.FileInfo]::new($_)) | Out-Null
        }

        # Determine whether we have an object with records or a pointer to a file for the Records parameter
        if($Records -is [System.String]) {
            $Logger.LogMessage("Records parameter points to a file, creating file object.", "INFO", $null, $null)
            $Records = [System.IO.FileInfo]::new($Records)
        }

    }

    PROCESS {

        if(($AzureLogType -eq "UnifiedAuditLog") -or ($PlayBooks -match "UAL")) {
            if($Records -isnot [System.Object[]]) {
                $Logger.LogMessage("Sorry, we have not yet implemented the processing of UAL records from files. You need to load the UAL CSV file into an array first using Import-Csv", "ERROR", $null, $null)
                break
            }
            # Let's cast UAL records to a [AuditLogSchemaGeneric] Type dropping unnecessary properties
            $Logger.LogMessage("Pre-Processing Records", "INFO", $null, $null)
            [System.Collections.ArrayList]$AzHunterRecords = @()
            $Records | ForEach-Object { 
                $SingleRecord = $_ | Select-Object -Property RecordType, CreationDate, UserIds, Operations, AuditData, ResultIndex, ResultCount, Identity
                $AzHunterRecords.Add($SingleRecord -as [AuditLogSchemaGeneric]) | Out-Null }

            $Records = $AzHunterRecords
        }

        # (1) Applying Base Playbook
        # Don't apply sorting first since it can be very slow for big datasets
        # $BasePlaybookRecords = [AzHunterBase]::new($AzHunterRecords).DedupRecords("Identity").SortRecords("CreationDate")

        # (2) Applying Remaining Playbooks
        
        ForEach($Playbook in $PlayBooks) {
            $Logger.LogMessage("Checking Playbooks to be applied to the data...", "INFO", $null, $null)

            # Let's run the Playbooks passing in the records
            $PlaybookFileList | ForEach-Object {
                $PlaybookBaseName = $_.BaseName
                
                if($PlaybookBaseName -eq $Playbook) {
                    try {

                        $Logger.LogMessage("Loading Playbook $PlaybookBaseName", "INFO", $null, $null)

                        . $_.FullName # Load Playbook file in the current session

                        if($PassThru) {
                            if($PlayBookParameters) {
                                $ProcessedRecords = Start-AzHunterPlaybook @PlayBookParameters -Records $Records -PassThru
                            }
                            else {
                                $ProcessedRecords = Start-AzHunterPlaybook -Records $Records -PassThru
                            }
                            return $ProcessedRecords
                        }
                        else {
                            if($PlayBookParameters) {
                                Start-AzHunterPlaybook @PlayBookParameters -Records $Records 
                            }
                            else {
                                Start-AzHunterPlaybook -Records $Records
                            }
                        }
                    }
                    catch {
                        $Logger.LogMessage("Could not load Playbook $Playbook", "ERROR", $null, $_)
                    }
                }
            }
        }
    }

    END {
        $Logger.LogMessage("Finished running Playbooks", "SPECIAL", $null, $null)
    }

}

Export-ModuleMember -Function 'Invoke-AzHunterPlaybook'
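# Usage sketch (illustrative; the CSV file name is hypothetical): records can be loaded
# from a UAL CSV export and run through one or more playbooks, with -PassThru returning
# the processed records to the console.
<#
    $UALRecords = Import-Csv ".\ual-export.csv"
    Invoke-AzHunterPlaybook -Records $UALRecords -AzureLogType UnifiedAuditLog `
        -PlayBooks 'AzHunter.Playbook.UAL.Exporter' -PassThru
#>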
#EndRegion '.\public\Invoke-AzHunterPlaybook.ps1' 218
#Region '.\public\Search-AzureCloudUnifiedLog.ps1' 0
<#
    CYBERNETHUNTER SECURITY OPERATIONS :)
    Author: Diego Perez (@darkquassar)
    Version: 1.1.0
    Module: Search-AzureCloudUnifiedLog.ps1
    Description: This module contains some utilities to search through Azure and O365 unified audit log.
#>


#using namespace AzureHunter.AzureSearcher
#using namespace AzureHunter.Logger
#using namespace AzureHunter.TimeStamp
#using namespace AzureHunter.AzCloudInit

try {
    Get-Command Invoke-AzHunterPlaybook -ErrorAction Stop | Out-Null
}
catch {
    # Need to Import Module Invoke-AzHunterPlaybook
    . .\Invoke-AzHunterPlaybook.ps1
}

$AzureHunterLogo = @'
                                    _    _             _            
          /\                       | |  | |           | |           
         /  \    _____   _ _ __ ___| |__| |_   _ _ __ | |_ ___ _ __ 
        / /\ \  |_  / | | | '__/ _ \  __  | | | | '_ \| __/ _ \ '__|
       / ____ \  / /| |_| | | |  __/ |  | | |_| | | | | ||  __/ |   
      /_/    \_\/___|\__,_|_|  \___|_|  |_|\__,_|_| |_|\__\___|_|   
                                                                
         a powershell framework to run threat hunting playbooks on Azure data
 
                      ╰(⇀︿⇀)つ-]═───> by Diego Perez (@darkquassar)
 
'@


Function Search-AzureCloudUnifiedLog {
    <#
    .SYNOPSIS
        A PowerShell function to search the Azure Unified Audit Log (UAL)
  
    .DESCRIPTION
        This function will allow you to retrieve UAL logs iteratively, implementing some safeguards to ensure the maximum log density is exported while avoiding flaky results produced by the PowerShell ExchangeOnline API.
  
    .PARAMETER StartDate
        Start Date in the form: year-month-dayThour:minute:seconds
 
    .PARAMETER EndDate
        End Date in the form: year-month-dayThour:minute:seconds
 
    .PARAMETER TimeInterval
        Time Interval in hours. This represents the interval windows that will be queried between StartDate and EndDate. This is a sliding window.
 
    .PARAMETER AggregatedResultsFlushSize
        The amount of logs that need to be accumulated before deduping and exporting. Logs are accumulated in batches; setting it to 0 (zero) removes this requirement and exports all batches individually. It is recommended to set this value to 50000 for long searches (i.e. extended in time). The higher the value, the more RAM it will consume but the fewer duplicates you will find in your final results.
 
    .PARAMETER ResultSizeUpperThreshold
        Maximum amount of records we want returned within our current time slice and Azure session. It is recommended this is left with the default 20k.
 
    .PARAMETER AuditLogRecordType
        The record type that you would like to return. For a list of available ones, check API documentation: https://docs.microsoft.com/en-us/office/office-365-management-api/office-365-management-activity-api-schema#auditlogrecordtype. The default value is "All"
 
    .PARAMETER AuditLogOperations
        Based on the record type, there are different kinds of operations associated with them. Specify them here separated by commas, each value enclosed within quotation marks.
 
    .PARAMETER UserIDs
        The users you would like to investigate. If this parameter is not provided it will default to all users. Specify them here separated by commas, each value enclosed within quotation marks.
 
    .PARAMETER FreeText
        You can search the log using FreeText strings, matches are performed based on a "contains" method (i.e. no RegEx)
 
    .PARAMETER SkipAutomaticTimeWindowReduction
        This parameter will skip automatic adjustment of the TimeInterval windows between your Start and End Dates.
  
    .EXAMPLE
        Search-AzureCloudUnifiedLog -StartDate "2021-03-06T10:00:00" -EndDate "2021-06-09T12:40:00" -TimeInterval 12 -AggregatedResultsFlushSize 5000 -Verbose
 
    .EXAMPLE
        Search-AzureCloudUnifiedLog -StartDate "2021-03-06T10:00:00" -EndDate "2021-06-09T12:40:00" -TimeInterval 12 -UserIDs "user.name@yourdomain.com" -AuditLogRecordType "ExchangeItem" -Verbose
  
    .NOTES
        Please use this with care and for legitimate purposes. The author does not take responsibility for any damage caused as a result of employing this script.
    #>


    [CmdletBinding(
        SupportsShouldProcess=$False
    )]
    Param (
        [Parameter( 
            Mandatory=$True,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=0,
            HelpMessage='Start Date in the form: year-month-dayThour:minute:seconds'
        )]
        [ValidatePattern("\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}")]
        [ValidateNotNullOrEmpty()]
        [string]$StartDate,

        [Parameter( 
            Mandatory=$True,
            ValueFromPipeline=$True,
            ValueFromPipelineByPropertyName=$True,
            Position=1,
            HelpMessage='End Date in the form: year-month-dayThour:minute:seconds'
        )]
        [ValidatePattern("\d{4}-\d{2}-\d{2}T\d{2}:\d{2}:\d{2}")]
        [ValidateNotNullOrEmpty()]
        [string]$EndDate,

        [Parameter(
            Mandatory=$False,
            ValueFromPipeline=$True,
            ValueFromPipelineByPropertyName=$True,
            Position=2,
            HelpMessage='Time Interval in hours. This represents the interval windows that will be queried between StartDate and EndDate'
        )]
        [ValidateNotNullOrEmpty()]
        [float]$TimeInterval=12,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=4,
            HelpMessage='The amount of logs that need to be accumulated before deduping and exporting; setting it to 0 (zero) removes this requirement and exports all batches individually. It is recommended to set this value to 50000 for long searches. The higher the value, the more RAM it will consume but the fewer duplicates you will find in your final results.'
        )]
        [ValidateNotNullOrEmpty()]
        [int]$AggregatedResultsFlushSize=0,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=5,
            HelpMessage='Maximum amount of records we want returned within our current time slice and Azure session. It is recommended this is left with the default 20k'
        )]
        [ValidateNotNullOrEmpty()]
        [int]$ResultSizeUpperThreshold=20000,

        [Parameter(
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=6,
            HelpMessage='The record type that you would like to return. For a list of available ones, check API documentation: https://docs.microsoft.com/en-us/office/office-365-management-api/office-365-management-activity-api-schema#auditlogrecordtype'
        )]
        [string]$AuditLogRecordType="All",

        [Parameter(
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=7,
            HelpMessage='Based on the record type, there are different kinds of operations associated with them. Specify them here separated by commas, each value enclosed within quotation marks'
        )]
        [string[]]$AuditLogOperations,

        [Parameter(
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=8,
            HelpMessage='The users you would like to investigate. If this parameter is not provided it will default to all users. Specify them here separated by commas, each value enclosed within quotation marks'
        )]
        [string[]]$UserIDs,

        [Parameter(
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=9,
            HelpMessage='You can search the log using FreeText strings'
        )]
        [string]$FreeText,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=10,
            HelpMessage='This parameter will skip automatic adjustment of the TimeInterval windows between your Start and End Dates.'
        )]
        [ValidateNotNullOrEmpty()]
        [switch]$SkipAutomaticTimeWindowReduction,

        [Parameter( 
            Mandatory=$False,
            ValueFromPipeline=$False,
            ValueFromPipelineByPropertyName=$False,
            Position=11,
            HelpMessage='Whether to run this module with fake data'
        )]
        [ValidateNotNullOrEmpty()]
        [switch]$RunTestOnly
    )

    BEGIN {

        # Show Logo mofo
        Write-Host -ForegroundColor Green $AzureHunterLogo

        # *** Getting a handle to the running script path so that we can refer to it *** #
        if ($PSScriptRoot) {
            $ScriptPath = [System.IO.DirectoryInfo]::new($PSScriptRoot)
        } 
        else {
            $ScriptPath = [System.IO.DirectoryInfo]::new($pwd)
        }

        # Initialize Logger
        $Global:Logger = [Logger]::New().InitLogFile()
        $Logger.LogMessage("Logs will be written to: $($Logger.ScriptPath)", "DEBUG", $null, $null)
        # Initialize Pre Checks
        if(!$RunTestOnly) {
            $CloudInit = [AzCloudInit]::new()
            $CloudInit.InitializePreChecks($null)
            # Authenticate to Exchange Online
            $GetPSSessions = Get-PSSession | Select-Object -Property State, Name
            $ExOConnected = (@($GetPSSessions) -like '@{State=Opened; Name=ExchangeOnlineInternalSession*').Count -gt 0
            if(-not $ExOConnected) {
                try {
                    Connect-ExchangeOnline -UseMultithreading $True -ShowProgress $True
                }
                catch {
                    $Logger.LogMessage("Could not connect to Exchange Online. Please run Connect-ExchangeOnline before running AzureHunter", "ERROR", $null, $_)
                    break
                }
            }
        }
    }

    PROCESS {

        # Grab Start and End Timestamps
        $TimeSlicer = [TimeStamp]::New($StartDate, $EndDate, $TimeInterval)
        $TimeSlicer.IncrementTimeSlice($TimeInterval)

        # Initialize Azure Searcher
        $AzureSearcher = [AzureSearcher]::new($TimeSlicer, $ResultSizeUpperThreshold)
        $AzureSearcher.SetRecordType([AuditLogRecordType]::$AuditLogRecordType).SetOperations($AuditLogOperations).SetUserIds($UserIds).SetFreeText($FreeText) | Out-Null
        $Logger.LogMessage("AzureSearcher Settings | RecordType: $($AzureSearcher.RecordType) | Operations: $($AzureSearcher.Operations) | UserIDs: $($AzureSearcher.UserIds) | FreeText: $($AzureSearcher.FreeText)", "SPECIAL", $null, $null)

        # Records Counter
        $TotalRecords = 0

        # Flow Control
        $TimeWindowAdjustmentNumberOfAttempts = 1  # How many times the time window adjustment should be attempted before proceeding to the next block
        $NumberOfAttempts = 1   # How many times a call to the API should be attempted before proceeding to the next block
        $ResultCountEstimate = 0 # Start with a value that triggers the time window reduction loop
        # $ResultSizeUpperThreshold --> Maximum amount of records we want returned within our current time slice and Azure session
        $ShouldExportResults = $true # whether results should be exported in a given loop; helps determine whether export routines should run when there are errors or no records provided
        $TimeIntervalReductionRate = 0.2 # the percentage by which the time interval is reduced until returned results is within $ResultSizeUpperThreshold
        $FirstOptimalTimeIntervalCheckDone = $false # whether we still need to perform the initial optimal timeslice check for automatic time window reduction; its initial value is $false because the check has not yet been performed
        [System.Collections.ArrayList]$Script:AggregatedResults = @()

        $Logger.LogMessage("Upper Log ResultSize Threshold for each Batch: $ResultSizeUpperThreshold", "SPECIAL", $null, $null)
        $Logger.LogMessage("Aggregated Results Max Size: $AggregatedResultsFlushSize", "SPECIAL", $null, $null)

        # **** CHECK IF RUNNING TEST ONLY ****
        if($RunTestOnly) {
            $TestRecords = Get-Content ".\tests\test-data\test-auditlogs.json" | ConvertFrom-Json
            Invoke-AzHunterPlaybook -Records $TestRecords -Playbooks "AzHunter.Playbook.UAL.Exporter"
            break
        }

        # Search audit log between $TimeSlicer.StartTimeSlice and $TimeSlicer.EndTimeSlice
        while($TimeSlicer.StartTimeSlice -le $TimeSlicer.EndTime) {

            # **** START: TIME WINDOW FLOW CONTROL ROUTINE **** #
            # ************************************************* #
            if($FirstOptimalTimeIntervalCheckDone -eq $false) {
                # $AdjustmentMode = ProportionalAdjustment
                # $AzureLogSearchSessionName = RandomSessionName
                # $ResultCount = $null
                $RandomSessionName = "azurehunter-$(Get-Random)"
                $AzureSearcher.AdjustTimeInterval("ProportionalAdjustment", $RandomSessionName, $null)
                if($TimeSlicer.InitialIntervalAdjusted -eq $True) {
                    $FirstOptimalTimeIntervalCheckDone = $true
                }
                
            }
            # **** END: TIME WINDOW FLOW CONTROL ROUTINE **** #
            # *********************************************** #

            # **** START: DATA MINING FROM AZURE ROUTINE **** #
            # *********************************************** #

            # Setup block variables
            $RandomSessionName = "azurehunter-$(Get-Random)"
            $NumberOfAttempts = 1

            # We need the result cumulus to keep track of the batch of ResultSizeUpperThreshold logs (20k by default)
            # These logs will then get sort by date and the last date used as the new $StartTimeSlice value
            [System.Collections.ArrayList]$Script:ResultCumulus = @()

            # *** RETURN LARGE SET LOOP ***
            # Loop through paged results and extract all of them sequentially, before going into the next TimeSlice cycle

            while(
                    ($Script:Results.Count -ne 0) -or 
                    ($ShouldRunReturnLargeSetLoop -eq $true) -or 
                    ($NumberOfAttempts -le 3)
                ) {
                # NOTE: when the ShouldRunReturnLargeSetLoop flag is set, we keep requesting logs within the same session until all logs available in the Azure session have been exhausted. For large datasets this means the AggregatedResultsFlushSize parameter won't take effect unless ResultSizeUpperThreshold is reduced below AggregatedResultsFlushSize.
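                # Worked example of the note above: with $ResultSizeUpperThreshold = 20000 and
                # $AggregatedResultsFlushSize = 5000, one session can return ~20k records before this
                # loop exits, so the 5k flush threshold is only evaluated afterwards and the effective
                # flush size becomes ~20k. Illustration only (assuming ResultSizeUpperThreshold is
                # exposed as a parameter alongside the others shown here):
                <#
                    Search-AzureCloudUnifiedLog -StartDate "2020-03-06T10:00:00" -EndDate "2020-03-09T12:40:00" `
                        -ResultSizeUpperThreshold 5000 -AggregatedResultsFlushSize 5000
                #>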

                # Run for this loop
                $Logger.LogMessage("Fetching next batch of logs. Session: $RandomSessionName", "LOW", $null, $null)
                $Script:Results = $AzureSearcher.SearchAzureAuditLog($RandomSessionName)

                # Test whether we got any results at all
                # If we got results, we need to determine whether the ResultSize is too big and run additional Data Consistency Checks
                if($Script:Results.Count -eq 0) {
                    $Logger.LogMessage("No more logs remaining in session $RandomSessionName. Exporting results and going into the next iteration...", "LOW", $null, $null)
                    $ShouldExportResults = $true
                    break
                }
                # We DID GET RESULTS. Let's run Data Consistency Checks before proceeding
                else {

                    $ResultCountEstimate = $Script:Results[0].ResultCount
                    $Logger.LogMessage("Batch Result Size: $ResultCountEstimate | Session: $RandomSessionName", "LOW", $null, $null)

                    # *** DATA CONSISTENCY CHECK 01: Log density within threshold *** #
                    # *************************************************************** #

                    # Test whether result size is within threshold limits
                    # Since a particular TimeInterval does not guarantee it will produce the desired log density for
                    # all time slices (log volume varies in the enterprise throught the day)
                    # This check should not matter if the user selected to Skip Automatic TimeWindow Reduction.
                    if(-not $SkipAutomaticTimeWindowReduction) {

                        if($ResultCountEstimate -eq 0) {
                            $Logger.LogMessage("Result density is ZERO. We need to try again. Attempt $NumberOfAttempts of 3", "DEBUG", $null, $null)
                            # Set results export flag
                            $ShouldExportResults = $false
                            $NumberOfAttempts++
                            continue
                        }
                        if($ResultCountEstimate -gt $ResultSizeUpperThreshold) {
                            $Logger.LogMessage("Result density is HIGHER THAN THE THRESHOLD of $ResultSizeUpperThreshold. We need to adjust time intervals.", "DEBUG", $null, $null)
                            $Logger.LogMessage("Time Interval prior to running adjustment: $($TimeSlicer.UserDefinedInitialTimeInterval)", "DEBUG", $null, $null)
                            # Set results export flag
                            $ShouldExportResults = $false
                            
                            $RandomSessionName = "azurehunter-$(Get-Random)"
                            $AzureSearcher.AdjustTimeInterval("PercentageAdjustment", $RandomSessionName, $ResultCountEstimate)
                            $Logger.LogMessage("Time Interval after running adjustment: $($TimeSlicer.UserDefinedInitialTimeInterval)", "DEBUG", $null, $null)
                            break
                        }
                        # Else if results within Threshold limits
                        else {
                            $ShouldExportResults = $true
                        }
                    }
                    
                    
                    # *** DATA CONSISTENCY CHECK 02: Sequential data consistency *** #
                    # ************************************************************** #

                    # PROBLEM WE TRIED TO SOLVE HERE: at some point Azure may start returning result indices that are not sequential, so the results will (a) be inconsistent and (b) break any downstream script. However, the ReturnLargeSet switch is still the best way to export the largest volume of logs within a given timespan, so the solution was to implement a check and abort log exporting whenever the result index stops being sequential.
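                    # Illustrative failure mode (made-up numbers): within one session, page N ends at
                    # ResultIndex 4500; page N+1 then starts at 3000 and ends at 4200. The new
                    # EndResultIndex (4200) is lower than the former (4500), so the batch would re-export
                    # ~1500 duplicate records. Check (1) below catches this; check (2) catches pages whose
                    # own start index overshoots their end index.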

                    # Tracking session and results for current and previous sessions
                    # This will aid in checks below for log index integrity
                    if($CurrentSession){ $FormerSession = $CurrentSession } else {$FormerSession = $RandomSessionName}
                    $CurrentSession = $RandomSessionName
                    if($HighestEndResultIndex){ $FormerHighestEndResultIndex = $HighestEndResultIndex } else {$FormerHighestEndResultIndex = $EndResultIndex}
                    $StartResultIndex = $Script:Results[0].ResultIndex
                    $HighestEndResultIndex = $Script:Results[($Script:Results.Count - 1)].ResultIndex

                    # Check for erratic Azure API and/or PowerShell behaviour where the API goes back and re-exports duplicated results
                    # Check (1): Is the current End Record Index lower than the previous End Record Index? --> YES --> indices are inconsistent; abort this cycle and proceed with the next iteration
                    # Check (2): Is the current End Record Index lower than the current Start Record Index? --> YES --> indices are inconsistent; abort this cycle and proceed with the next iteration

                    # Only run this check within the same session, since comparing these values across different sessions would naturally produce spurious mismatches
                    if($FormerSession -eq $CurrentSession) {
                        if (($HighestEndResultIndex -lt $FormerHighestEndResultIndex) -or ($StartResultIndex -gt $HighestEndResultIndex)) {

                            $Logger.LogMessage("Azure API or Search-UnifiedAuditLog behaving weirdly and going back in time... Need to abort this cycle and try again | CurrentSession = $CurrentSession | FormerSession = $FormerSession | FormerHighestEndResultIndex = $FormerHighestEndResultIndex | CurrentHighestEndResultIndex = $HighestEndResultIndex | StartResultIndex = $StartResultIndex | Result Count = $($Script:Results.Count)", "ERROR", $null, $null)
                            
                            if($NumberOfAttempts -lt 3) {
                                $RandomSessionName = "azurehunter-$(Get-Random)"
                                $Logger.LogMessage("Failed to query Azure API: Attempt $NumberOfAttempts of 3. Trying again in new session: $RandomSessionName", "ERROR", $null, $null)
                                $NumberOfAttempts++
                                continue
                            }
                            else {
                                $Logger.LogMessage("Failed to query Azure API: Attempt $NumberOfAttempts of 3. Exporting collected partial results so far and increasing timeslice", "SPECIAL", $null, $null)
                                $ShouldExportResults = $true
                                break
                            }
                        }
                    }
                }

                # Collate Results
                # Append partial results to the ResultCumulus
                # in preparation for deduping and sorting
                $StartingResultIndex = $Script:Results[0].ResultIndex
                $EndResultIndex = $Script:Results[($Script:Results.Count - 1)].ResultIndex
                $Logger.LogMessage("Adding records $StartingResultIndex to $EndResultIndex", "INFO", $null, $null)
                $Script:Results | ForEach-Object { $Script:ResultCumulus.add($_) | Out-Null }

            }
            # **** END: DATA MINING FROM AZURE ROUTINE **** #
            # ********************************************* #

            # **** START: DATA POST-PROCESSING ROUTINE **** #
            # ********************************************* #
            # If available results are bigger than the Threshold, then don't export logs
            if($ShouldExportResults -eq $false) {
                continue
            }
            else {

                # Exporting logs. Run additional check for Results.Count
                try {
                    if($Script:ResultCumulus.Count -ne 0) {
                        # Sorting and Deduplicating Results
                        # DEDUPING
                        $Logger.LogMessage("Sorting and Deduplicating current batch Results", "LOW", $null, $null)
                        $ResultCountBeforeDedup = $Script:ResultCumulus.Count
                        $DedupedResults = $Script:ResultCumulus | Sort-Object -Property Identity -Unique
                        # NOTE: when only a single record survives deduplication, Sort-Object returns a single PSObject rather than an array, and its .Count does not reliably return a value; hence this workaround
                        if($Script:ResultCumulus.Count -eq 1) {$ResultCountAfterDedup = 1} else {$ResultCountAfterDedup = $DedupedResults.Count}
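                        # An equivalent and arguably more idiomatic fix is the array subexpression
                        # operator, which always yields an array with a valid .Count, even for one record:
                        #   $ResultCountAfterDedup = @($DedupedResults).Count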
                        $ResultCountDuplicates = $ResultCountBeforeDedup - $ResultCountAfterDedup

                        $Logger.LogMessage("Removed $ResultCountDuplicates Duplicate Records from current batch", "SPECIAL", $null, $null)

                        # SORTING by TimeStamp
                        $SortedResults = $DedupedResults | Sort-Object -Property CreationDate
                        $Logger.LogMessage("Current batch Result Size = $($SortedResults.Count)", "SPECIAL", $null, $null)
                        
                        if($AggregatedResultsFlushSize -eq 0){
                            $Logger.LogMessage("No Aggregated Results parameter configured. Exporting current batch of records to $ExportFileName", "DEBUG", $null, $null)
                            #$SortedResults | Export-Csv $ExportFileName -NoTypeInformation -NoClobber -Append
                            Invoke-AzHunterPlaybook -Records $SortedResults -Playbooks "AzHunter.Playbook.UAL.Exporter"

                            # Count total records so far
                            $TotalRecords = $TotalRecords + $SortedResults.Count
                            $FirstCreationDateRecord = $SortedResults[0].CreationDate
                            $LastCreationDateRecord = $SortedResults[($SortedResults.Count -1)].CreationDate
                            # Report total records
                            $Logger.LogMessage("Total Records exported so far: $TotalRecords ", "SPECIAL", $null, $null)
                        }
                        elseif($Script:AggregatedResults.Count -ge $AggregatedResultsFlushSize) {

                            # Need to add latest batch of results before exporting
                            $Logger.LogMessage("AGGREGATED RESULTS | Reached maximum Aggregated Batch Threshold of $AggregatedResultsFlushSize", "INFO", $null, $null)
                            $Logger.LogMessage("AGGREGATED RESULTS | Adding current batch results to Aggregated Results", "SPECIAL", $null, $null)
                            $SortedResults | ForEach-Object { $Script:AggregatedResults.add($_) | Out-Null }

                            $AggResultCountBeforeDedup = $Script:AggregatedResults.Count
                            $Script:AggregatedResults = $Script:AggregatedResults | Sort-Object -Property Identity -Unique
                            $AggResultCountAfterDedup = $Script:AggregatedResults.Count
                            $AggResultCountDuplicates = $AggResultCountBeforeDedup - $AggResultCountAfterDedup
                            $Logger.LogMessage("AGGREGATED RESULTS | Removed $AggResultCountDuplicates Duplicate Records from Aggregated Results", "SPECIAL", $null, $null)
                            Invoke-AzHunterPlaybook -Records $Script:AggregatedResults -Playbooks "AzHunter.Playbook.UAL.Exporter"

                            # Count records so far
                            $TotalRecords = $TotalRecords + $Script:AggregatedResults.Count
                            $FirstCreationDateRecord = $SortedResults[0].CreationDate
                            $LastCreationDateRecord = $SortedResults[($SortedResults.Count -1)].CreationDate
                            # Report total records
                            $Logger.LogMessage("Total Records EXPORTED so far: $TotalRecords ", "SPECIAL", $null, $null)

                            # Reset $Script:AggregatedResults
                            [System.Collections.ArrayList]$Script:AggregatedResults = @()
                        }
                        else {
                            $Logger.LogMessage("AGGREGATED RESULTS | Adding current batch results to Aggregated Results", "SPECIAL", $null, $null)
                            $SortedResults | ForEach-Object { $Script:AggregatedResults.add($_) | Out-Null }

                            # Count records so far
                            $TotalAggregatedBatchRecords = $Script:AggregatedResults.Count
                            $FirstCreationDateRecord = $SortedResults[0].CreationDate
                            $LastCreationDateRecord = $SortedResults[($SortedResults.Count -1)].CreationDate
                            # Report total records
                            $Logger.LogMessage("AGGREGATED RESULTS | Total Records aggregated in current batch: $TotalAggregatedBatchRecords", "SPECIAL", $null, $null)
                        }

                        $Logger.LogMessage("TimeStamp of first received record in local time: $($FirstCreationDateRecord.ToLocalTime().ToString($TimeSlicer.Culture))", "SPECIAL", $null, $null)
                        $Logger.LogMessage("TimeStamp of latest received record in local time: $($LastCreationDateRecord.ToLocalTime().ToString($TimeSlicer.Culture))", "SPECIAL", $null, $null)

                        # Add an extra second so we avoid re-exporting logs that match the latest exported timestamp
                        # There is a risk of losing a few logs by doing this, but it reduces duplicates significantly
                        $TimeSlicer.EndTimeSlice = $LastCreationDateRecord.AddSeconds(1).ToLocalTime()
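                        # Boundary example: if the last exported record was created at 10:00:00.500, restarting
                        # the next slice at that exact instant would re-fetch every record sharing the timestamp;
                        # starting one second later skips those duplicates but may drop records created in between.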

                        # INCREASE TIME INTERVAL FOR NEXT CYCLE
                        $TimeSlicer.IncrementTimeSlice($TimeSlicer.UserDefinedInitialTimeInterval)

                        $Logger.LogMessage("INCREMENTED TIMESLICE | Next TimeSlice in local time: [StartDate] $($TimeSlicer.StartTimeSlice.ToString($TimeSlicer.Culture)) - [EndDate] $($TimeSlicer.EndTimeSlice.ToString($TimeSlicer.Culture))", "INFO", $null, $null)

                        # Set flag to run ReturnLargeSet loop next time
                        $ShouldRunReturnLargeSetLoop = $true
                        $SortedResults = $null
                        [System.Collections.ArrayList]$Script:ResultCumulus = @()
                    }
                    else {
                        $Logger.LogMessage("No logs found in current timewindow. Sliding to the next timeslice", "DEBUG", $null, $null)

                        # INCREASE TIME INTERVAL FOR NEXT CYCLE
                        $TimeSlicer.IncrementTimeSlice($TimeSlicer.UserDefinedInitialTimeInterval)

                        $Logger.LogMessage("INCREMENTED TIMESLICE | Next TimeSlice in local time: [StartDate] $($TimeSlicer.StartTimeSlice.ToString($TimeSlicer.Culture)) - [EndDate] $($TimeSlicer.EndTimeSlice.ToString($TimeSlicer.Culture))", "INFO", $null, $null)

                        # NOTE: We are still missing a routine to handle the case where $TimeSlicer.StartTimeSlice -ge $TimeSlicer.EndTime while the Aggregated Batch still holds results that never reached the export threshold. The exporting routine needs to move into a nested function so it can be invoked here to export that last batch before the end of the timespan.
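                        # One possible shape for that routine, sketched only (Export-AzHunterBatch is a
                        # hypothetical nested helper, not part of this module):
                        <#
                            function Export-AzHunterBatch {
                                # Flush whatever remains in the aggregated batch through the exporter playbook
                                if($Script:AggregatedResults.Count -gt 0) {
                                    Invoke-AzHunterPlaybook -Records $Script:AggregatedResults -Playbooks "AzHunter.Playbook.UAL.Exporter"
                                    [System.Collections.ArrayList]$Script:AggregatedResults = @()
                                }
                            }
                        #>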
                        continue # try again
                    }
                }
                catch {
                    Write-Host $_
                    $Logger.LogMessage("GENERIC ERROR", "ERROR", $null, $_)
                }
            }
            # **** END: DATA POST-PROCESSING ROUTINE **** #
            # ********************************************* #
        }
    }
    END {
        $Logger.LogMessage("AZUREHUNTER | FINISHED EXTRACTING RECORDS", "SPECIAL", $null, $null)
    }
}

Export-ModuleMember -Function 'Search-AzureCloudUnifiedLog'
#EndRegion '.\public\Search-AzureCloudUnifiedLog.ps1' 527
#Region '.\public\Test-AzureCloudUnifiedLog.ps1' 0
#using namespace AzureHunter.CloudInit

Function Test-AzureCloudUnifiedLog {
    
    Invoke-HuntAzureAuditLogs -Records @(1,2,3,4,5,6,7,8,9,10)
    $TestArray = @(1,2,3,4,5)
    $Exporter = [Exporter]::new($TestArray)
    Write-Host $Exporter.RecordArray
}

Function Test-CloudInitClass {
    
    # This should print the initialization output message
    # Then check module availability
    # Then finally connect to exchange online
    $CloudInit = [AzCloudInit]::new()
    $CloudInit.InitializePreChecks($null)
    # Authenticate to ExchangeOnline
    $GetPSSessions = Get-PSSession | Select-Object -Property State, Name
    $ExOConnected = (@($GetPSSessions) -like '@{State=Opened; Name=ExchangeOnlineInternalSession*').Count -gt 0
    if(-not $ExOConnected) {
        Connect-ExchangeOnline -UseMultithreading $True -ShowProgress $True
    }
}

Function Test-SearchUnifiedAuditLog {

    $StartDate = [DateTime]::UtcNow.AddDays(-0.045)
    $EndDate = [DateTime]::UtcNow
    $Results = Search-UnifiedAuditLog -StartDate $StartDate -EndDate $EndDate -ResultSize 100
    return $Results
}

Function Test-AzHunterSearchLogWithTestData {

    Search-AzureCloudUnifiedLog -StartDate "2020-03-06T10:00:00" -EndDate "2020-03-09T12:40:00" -TimeInterval 12 -UserIDs "test.user@contoso.com" -AggregatedResultsFlushSize 100 -RunTestOnly -Verbose
}
#EndRegion '.\public\Test-AzureCloudUnifiedLog.ps1' 38